Clean broken entries from the Bazel cache (#8668)

* Clean broken entries from the Bazel cache

This is hopefully a somewhat reasonable workaround for the "output not
created" errors that keep annoying us.

For now, this is just part of the hourly cronjob but we could move it
somewhere else if desired.

changelog_begin
changelog_end

* Fix GCS credentials

changelog_begin
changelog_end
Moritz Kiefer 2021-01-28 18:57:09 +01:00 committed by GitHub
parent 83496abf62
commit c89e00342d
13 changed files with 694 additions and 15 deletions

3rdparty/haskell/BUILD.bazel (new file)

@ -0,0 +1,52 @@
# Copyright (c) 2021 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
load("//bazel_tools:haskell.bzl", "da_haskell_library")
genrule(
name = "remote_apis_src",
srcs = [
"@com_github_bazelbuild_remote_apis//build/bazel/remote/execution/v2:remote_execution.proto",
"@com_google_protobuf//:well_known_protos",
"@com_github_googleapis_googleapis//google/rpc:status.proto",
],
outs = ["Build/Bazel/Remote/Execution/V2/RemoteExecution.hs"],
cmd = """
set -eoux pipefail
PROTO_FILE="$(location @com_github_bazelbuild_remote_apis//build/bazel/remote/execution/v2:remote_execution.proto)"
PROTO_FILE_NAME="$$(echo "$$PROTO_FILE" | sed 's|^.*\\(build/bazel/.*\\)|\\1|g')"
PROTO_DIR=$$(echo $${PROTO_FILE%$$PROTO_FILE_NAME})
$(location @proto3_suite//:compile-proto-file) \
--proto "$$PROTO_FILE_NAME" \
--includeDir "$$PROTO_DIR" \
--includeDir "external/com_google_protobuf/src" \
--includeDir "external/com_github_googleapis_googleapis" \
--out $(RULEDIR)
""",
tools = [
"@proto3_suite//:compile-proto-file",
],
visibility = ["//visibility:public"],
)
da_haskell_library(
name = "remote_apis",
srcs = [
":remote_apis_src",
"//ledger-api/grpc-definitions:google-protobuf-haskellpb-sources",
"//ledger-api/grpc-definitions:google-rpc-haskellpb-sources",
],
compiler_flags = ["-Wno-deriving-defaults"],
hackage_deps = [
"base",
"bytestring",
"containers",
"deepseq",
"text",
"vector",
"proto3-suite",
"proto3-wire",
],
visibility = ["//visibility:public"],
deps = [],
)
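A minimal sketch (not part of this commit; the helper name is hypothetical) of how the generated module can be consumed via proto3-suite, mirroring what ci/cron/src/BazelCache.hs does further down:

import qualified Data.ByteString as BS
import qualified Proto3.Suite as Proto3
import Build.Bazel.Remote.Execution.V2.RemoteExecution (ActionResult(..))

-- Decode a serialized ActionResult fetched from the cache; Nothing on a parse failure.
-- (decodeActionResult is a hypothetical helper, not defined in this commit.)
decodeActionResult :: BS.ByteString -> Maybe ActionResult
decodeActionResult = either (const Nothing) Just . Proto3.fromByteString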


@ -19,6 +19,11 @@ load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive", "http_file"
daml_deps()
load("@rules_haskell//haskell:repositories.bzl", "rules_haskell_dependencies")
load("@com_github_bazelbuild_remote_apis//:repository_rules.bzl", "switched_rules_by_language")
switched_rules_by_language(
name = "bazel_remote_apis_imports",
)
rules_haskell_dependencies()


@ -48,6 +48,37 @@ jobs:
AWS_SECRET_ACCESS_KEY: $(AWS_SECRET_ACCESS_KEY)
- template: ci/tell-slack-failed.yml
- job: fix_bazel_cache
timeoutInMinutes: 120
pool:
name: 'ubuntu_20_04'
demands: assignment -equals default
steps:
- checkout: self
- bash: ci/dev-env-install.sh
displayName: 'Build/Install the Developer Environment'
- template: ci/bash-lib.yml
parameters:
var_name: bash-lib
- bash: |
set -euo pipefail
eval "$(dev-env/bin/dade assist)"
bazel build //ci/cron:cron
key=$(mktemp)
cleanup="rm -rf $key ~/.config/gcloud"
trap "$cleanup" EXIT
echo "$GCRED" > $key
gcloud auth activate-service-account --key-file=$key
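# Ignore any pre-existing Boto config so gsutil picks up the service-account
# credentials activated above.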
export BOTO_CONFIG=/dev/null
# 90 minutes should provide enough overlap for an hourly
# cronjob.
./bazel-bin/ci/cron/cron bazel-cache --age 90 --delete
env:
GCRED: $(GOOGLE_APPLICATION_CREDENTIALS_CONTENT)
- template: ci/tell-slack-failed.yml
- job: docker_image
timeoutInMinutes: 60
pool:


@ -555,6 +555,8 @@ exports_files(["stack.exe"], visibility = ["//visibility:public"])
"split",
"stache",
"stm",
"stm-conduit",
"stm-chans",
"swagger2",
"syb",
"system-filepath",


@ -0,0 +1,374 @@
We only need the protobuf message for ActionResult. To keep the dependency
footprint small, we patch out the other definitions that would pull in extra
dependencies or fail to parse in proto3-suite.
diff --git a/build/bazel/remote/execution/v2/remote_execution.proto b/build/bazel/remote/execution/v2/remote_execution.proto
index 052c222..c988be5 100644
--- a/build/bazel/remote/execution/v2/remote_execution.proto
+++ b/build/bazel/remote/execution/v2/remote_execution.proto
@@ -16,9 +16,6 @@ syntax = "proto3";
package build.bazel.remote.execution.v2;
-import "build/bazel/semver/semver.proto";
-import "google/api/annotations.proto";
-import "google/longrunning/operations.proto";
import "google/protobuf/duration.proto";
import "google/protobuf/timestamp.proto";
import "google/rpc/status.proto";
@@ -30,329 +27,6 @@ option java_outer_classname = "RemoteExecutionProto";
option java_package = "build.bazel.remote.execution.v2";
option objc_class_prefix = "REX";
-
-// The Remote Execution API is used to execute an
-// [Action][build.bazel.remote.execution.v2.Action] on the remote
-// workers.
-//
-// As with other services in the Remote Execution API, any call may return an
-// error with a [RetryInfo][google.rpc.RetryInfo] error detail providing
-// information about when the client should retry the request; clients SHOULD
-// respect the information provided.
-service Execution {
- // Execute an action remotely.
- //
- // In order to execute an action, the client must first upload all of the
- // inputs, the
- // [Command][build.bazel.remote.execution.v2.Command] to run, and the
- // [Action][build.bazel.remote.execution.v2.Action] into the
- // [ContentAddressableStorage][build.bazel.remote.execution.v2.ContentAddressableStorage].
- // It then calls `Execute` with an `action_digest` referring to them. The
- // server will run the action and eventually return the result.
- //
- // The input `Action`'s fields MUST meet the various canonicalization
- // requirements specified in the documentation for their types so that it has
- // the same digest as other logically equivalent `Action`s. The server MAY
- // enforce the requirements and return errors if a non-canonical input is
- // received. It MAY also proceed without verifying some or all of the
- // requirements, such as for performance reasons. If the server does not
- // verify the requirement, then it will treat the `Action` as distinct from
- // another logically equivalent action if they hash differently.
- //
- // Returns a stream of
- // [google.longrunning.Operation][google.longrunning.Operation] messages
- // describing the resulting execution, with eventual `response`
- // [ExecuteResponse][build.bazel.remote.execution.v2.ExecuteResponse]. The
- // `metadata` on the operation is of type
- // [ExecuteOperationMetadata][build.bazel.remote.execution.v2.ExecuteOperationMetadata].
- //
- // If the client remains connected after the first response is returned after
- // the server, then updates are streamed as if the client had called
- // [WaitExecution][build.bazel.remote.execution.v2.Execution.WaitExecution]
- // until the execution completes or the request reaches an error. The
- // operation can also be queried using [Operations
- // API][google.longrunning.Operations.GetOperation].
- //
- // The server NEED NOT implement other methods or functionality of the
- // Operations API.
- //
- // Errors discovered during creation of the `Operation` will be reported
- // as gRPC Status errors, while errors that occurred while running the
- // action will be reported in the `status` field of the `ExecuteResponse`. The
- // server MUST NOT set the `error` field of the `Operation` proto.
- // The possible errors include:
- //
- // * `INVALID_ARGUMENT`: One or more arguments are invalid.
- // * `FAILED_PRECONDITION`: One or more errors occurred in setting up the
- // action requested, such as a missing input or command or no worker being
- // available. The client may be able to fix the errors and retry.
- // * `RESOURCE_EXHAUSTED`: There is insufficient quota of some resource to run
- // the action.
- // * `UNAVAILABLE`: Due to a transient condition, such as all workers being
- // occupied (and the server does not support a queue), the action could not
- // be started. The client should retry.
- // * `INTERNAL`: An internal error occurred in the execution engine or the
- // worker.
- // * `DEADLINE_EXCEEDED`: The execution timed out.
- // * `CANCELLED`: The operation was cancelled by the client. This status is
- // only possible if the server implements the Operations API CancelOperation
- // method, and it was called for the current execution.
- //
- // In the case of a missing input or command, the server SHOULD additionally
- // send a [PreconditionFailure][google.rpc.PreconditionFailure] error detail
- // where, for each requested blob not present in the CAS, there is a
- // `Violation` with a `type` of `MISSING` and a `subject` of
- // `"blobs/{hash}/{size}"` indicating the digest of the missing blob.
- rpc Execute(ExecuteRequest) returns (stream google.longrunning.Operation) {
- option (google.api.http) = { post: "/v2/{instance_name=**}/actions:execute" body: "*" };
- }
-
- // Wait for an execution operation to complete. When the client initially
- // makes the request, the server immediately responds with the current status
- // of the execution. The server will leave the request stream open until the
- // operation completes, and then respond with the completed operation. The
- // server MAY choose to stream additional updates as execution progresses,
- // such as to provide an update as to the state of the execution.
- rpc WaitExecution(WaitExecutionRequest) returns (stream google.longrunning.Operation) {
- option (google.api.http) = { post: "/v2/{name=operations/**}:waitExecution" body: "*" };
- }
-}
-
-// The action cache API is used to query whether a given action has already been
-// performed and, if so, retrieve its result. Unlike the
-// [ContentAddressableStorage][build.bazel.remote.execution.v2.ContentAddressableStorage],
-// which addresses blobs by their own content, the action cache addresses the
-// [ActionResult][build.bazel.remote.execution.v2.ActionResult] by a
-// digest of the encoded [Action][build.bazel.remote.execution.v2.Action]
-// which produced them.
-//
-// The lifetime of entries in the action cache is implementation-specific, but
-// the server SHOULD assume that more recently used entries are more likely to
-// be used again.
-//
-// As with other services in the Remote Execution API, any call may return an
-// error with a [RetryInfo][google.rpc.RetryInfo] error detail providing
-// information about when the client should retry the request; clients SHOULD
-// respect the information provided.
-service ActionCache {
- // Retrieve a cached execution result.
- //
- // Implementations SHOULD ensure that any blobs referenced from the
- // [ContentAddressableStorage][build.bazel.remote.execution.v2.ContentAddressableStorage]
- // are available at the time of returning the
- // [ActionResult][build.bazel.remote.execution.v2.ActionResult] and will be
- // for some period of time afterwards. The TTLs of the referenced blobs SHOULD be increased
- // if necessary and applicable.
- //
- // Errors:
- //
- // * `NOT_FOUND`: The requested `ActionResult` is not in the cache.
- rpc GetActionResult(GetActionResultRequest) returns (ActionResult) {
- option (google.api.http) = { get: "/v2/{instance_name=**}/actionResults/{action_digest.hash}/{action_digest.size_bytes}" };
- }
-
- // Upload a new execution result.
- //
- // In order to allow the server to perform access control based on the type of
- // action, and to assist with client debugging, the client MUST first upload
- // the [Action][build.bazel.remote.execution.v2.Execution] that produced the
- // result, along with its
- // [Command][build.bazel.remote.execution.v2.Command], into the
- // `ContentAddressableStorage`.
- //
- // Errors:
- //
- // * `INVALID_ARGUMENT`: One or more arguments are invalid.
- // * `FAILED_PRECONDITION`: One or more errors occurred in updating the
- // action result, such as a missing command or action.
- // * `RESOURCE_EXHAUSTED`: There is insufficient storage space to add the
- // entry to the cache.
- rpc UpdateActionResult(UpdateActionResultRequest) returns (ActionResult) {
- option (google.api.http) = { put: "/v2/{instance_name=**}/actionResults/{action_digest.hash}/{action_digest.size_bytes}" body: "action_result" };
- }
-}
-
-// The CAS (content-addressable storage) is used to store the inputs to and
-// outputs from the execution service. Each piece of content is addressed by the
-// digest of its binary data.
-//
-// Most of the binary data stored in the CAS is opaque to the execution engine,
-// and is only used as a communication medium. In order to build an
-// [Action][build.bazel.remote.execution.v2.Action],
-// however, the client will need to also upload the
-// [Command][build.bazel.remote.execution.v2.Command] and input root
-// [Directory][build.bazel.remote.execution.v2.Directory] for the Action.
-// The Command and Directory messages must be marshalled to wire format and then
-// uploaded under the hash as with any other piece of content. In practice, the
-// input root directory is likely to refer to other Directories in its
-// hierarchy, which must also each be uploaded on their own.
-//
-// For small file uploads the client should group them together and call
-// [BatchUpdateBlobs][build.bazel.remote.execution.v2.ContentAddressableStorage.BatchUpdateBlobs].
-// For large uploads, the client must use the
-// [Write method][google.bytestream.ByteStream.Write] of the ByteStream API. The
-// `resource_name` is `{instance_name}/uploads/{uuid}/blobs/{hash}/{size}`,
-// where `instance_name` is as described in the next paragraph, `uuid` is a
-// version 4 UUID generated by the client, and `hash` and `size` are the
-// [Digest][build.bazel.remote.execution.v2.Digest] of the blob. The
-// `uuid` is used only to avoid collisions when multiple clients try to upload
-// the same file (or the same client tries to upload the file multiple times at
-// once on different threads), so the client MAY reuse the `uuid` for uploading
-// different blobs. The `resource_name` may optionally have a trailing filename
-// (or other metadata) for a client to use if it is storing URLs, as in
-// `{instance}/uploads/{uuid}/blobs/{hash}/{size}/foo/bar/baz.cc`. Anything
-// after the `size` is ignored.
-//
-// A single server MAY support multiple instances of the execution system, each
-// with their own workers, storage, cache, etc. The exact relationship between
-// instances is up to the server. If the server does, then the `instance_name`
-// is an identifier, possibly containing multiple path segments, used to
-// distinguish between the various instances on the server, in a manner defined
-// by the server. For servers which do not support multiple instances, then the
-// `instance_name` is the empty path and the leading slash is omitted, so that
-// the `resource_name` becomes `uploads/{uuid}/blobs/{hash}/{size}`.
-// To simplify parsing, a path segment cannot equal any of the following
-// keywords: `blobs`, `uploads`, `actions`, `actionResults`, `operations` and
-// `capabilities`.
-//
-// When attempting an upload, if another client has already completed the upload
-// (which may occur in the middle of a single upload if another client uploads
-// the same blob concurrently), the request will terminate immediately with
-// a response whose `committed_size` is the full size of the uploaded file
-// (regardless of how much data was transmitted by the client). If the client
-// completes the upload but the
-// [Digest][build.bazel.remote.execution.v2.Digest] does not match, an
-// `INVALID_ARGUMENT` error will be returned. In either case, the client should
-// not attempt to retry the upload.
-//
-// For downloading blobs, the client must use the
-// [Read method][google.bytestream.ByteStream.Read] of the ByteStream API, with
-// a `resource_name` of `"{instance_name}/blobs/{hash}/{size}"`, where
-// `instance_name` is the instance name (see above), and `hash` and `size` are
-// the [Digest][build.bazel.remote.execution.v2.Digest] of the blob.
-//
-// The lifetime of entries in the CAS is implementation specific, but it SHOULD
-// be long enough to allow for newly-added and recently looked-up entries to be
-// used in subsequent calls (e.g. to
-// [Execute][build.bazel.remote.execution.v2.Execution.Execute]).
-//
-// As with other services in the Remote Execution API, any call may return an
-// error with a [RetryInfo][google.rpc.RetryInfo] error detail providing
-// information about when the client should retry the request; clients SHOULD
-// respect the information provided.
-service ContentAddressableStorage {
- // Determine if blobs are present in the CAS.
- //
- // Clients can use this API before uploading blobs to determine which ones are
- // already present in the CAS and do not need to be uploaded again.
- //
- // There are no method-specific errors.
- rpc FindMissingBlobs(FindMissingBlobsRequest) returns (FindMissingBlobsResponse) {
- option (google.api.http) = { post: "/v2/{instance_name=**}/blobs:findMissing" body: "*" };
- }
-
- // Upload many blobs at once.
- //
- // The server may enforce a limit of the combined total size of blobs
- // to be uploaded using this API. This limit may be obtained using the
- // [Capabilities][build.bazel.remote.execution.v2.Capabilities] API.
- // Requests exceeding the limit should either be split into smaller
- // chunks or uploaded using the
- // [ByteStream API][google.bytestream.ByteStream], as appropriate.
- //
- // This request is equivalent to calling a Bytestream `Write` request
- // on each individual blob, in parallel. The requests may succeed or fail
- // independently.
- //
- // Errors:
- //
- // * `INVALID_ARGUMENT`: The client attempted to upload more than the
- // server supported limit.
- //
- // Individual requests may return the following errors, additionally:
- //
- // * `RESOURCE_EXHAUSTED`: There is insufficient disk quota to store the blob.
- // * `INVALID_ARGUMENT`: The
- // [Digest][build.bazel.remote.execution.v2.Digest] does not match the
- // provided data.
- rpc BatchUpdateBlobs(BatchUpdateBlobsRequest) returns (BatchUpdateBlobsResponse) {
- option (google.api.http) = { post: "/v2/{instance_name=**}/blobs:batchUpdate" body: "*" };
- }
-
- // Download many blobs at once.
- //
- // The server may enforce a limit of the combined total size of blobs
- // to be downloaded using this API. This limit may be obtained using the
- // [Capabilities][build.bazel.remote.execution.v2.Capabilities] API.
- // Requests exceeding the limit should either be split into smaller
- // chunks or downloaded using the
- // [ByteStream API][google.bytestream.ByteStream], as appropriate.
- //
- // This request is equivalent to calling a Bytestream `Read` request
- // on each individual blob, in parallel. The requests may succeed or fail
- // independently.
- //
- // Errors:
- //
- // * `INVALID_ARGUMENT`: The client attempted to read more than the
- // server supported limit.
- //
- // Every error on individual read will be returned in the corresponding digest
- // status.
- rpc BatchReadBlobs(BatchReadBlobsRequest) returns (BatchReadBlobsResponse) {
- option (google.api.http) = { post: "/v2/{instance_name=**}/blobs:batchRead" body: "*" };
- }
-
- // Fetch the entire directory tree rooted at a node.
- //
- // This request must be targeted at a
- // [Directory][build.bazel.remote.execution.v2.Directory] stored in the
- // [ContentAddressableStorage][build.bazel.remote.execution.v2.ContentAddressableStorage]
- // (CAS). The server will enumerate the `Directory` tree recursively and
- // return every node descended from the root.
- //
- // The GetTreeRequest.page_token parameter can be used to skip ahead in
- // the stream (e.g. when retrying a partially completed and aborted request),
- // by setting it to a value taken from GetTreeResponse.next_page_token of the
- // last successfully processed GetTreeResponse).
- //
- // The exact traversal order is unspecified and, unless retrieving subsequent
- // pages from an earlier request, is not guaranteed to be stable across
- // multiple invocations of `GetTree`.
- //
- // If part of the tree is missing from the CAS, the server will return the
- // portion present and omit the rest.
- //
- // Errors:
- //
- // * `NOT_FOUND`: The requested tree root is not present in the CAS.
- rpc GetTree(GetTreeRequest) returns (stream GetTreeResponse) {
- option (google.api.http) = { get: "/v2/{instance_name=**}/blobs/{root_digest.hash}/{root_digest.size_bytes}:getTree" };
- }
-}
-
-// The Capabilities service may be used by remote execution clients to query
-// various server properties, in order to self-configure or return meaningful
-// error messages.
-//
-// The query may include a particular `instance_name`, in which case the values
-// returned will pertain to that instance.
-service Capabilities {
- // GetCapabilities returns the server capabilities configuration of the
- // remote endpoint.
- // Only the capabilities of the services supported by the endpoint will
- // be returned:
- // * Execution + CAS + Action Cache endpoints should return both
- // CacheCapabilities and ExecutionCapabilities.
- // * Execution only endpoints should return ExecutionCapabilities.
- // * CAS + Action Cache only endpoints should return CacheCapabilities.
- rpc GetCapabilities(GetCapabilitiesRequest) returns (ServerCapabilities) {
- option (google.api.http) = {
- get: "/v2/{instance_name=**}/capabilities"
- };
- }
-}
-
// An `Action` captures all the information about an execution which is required
// to reproduce it.
//
@@ -1383,25 +1057,6 @@ message GetCapabilitiesRequest {
string instance_name = 1;
}
-// A response message for
-// [Capabilities.GetCapabilities][build.bazel.remote.execution.v2.Capabilities.GetCapabilities].
-message ServerCapabilities {
- // Capabilities of the remote cache system.
- CacheCapabilities cache_capabilities = 1;
-
- // Capabilities of the remote execution system.
- ExecutionCapabilities execution_capabilities = 2;
-
- // Earliest RE API version supported, including deprecated versions.
- build.bazel.semver.SemVer deprecated_api_version = 3;
-
- // Earliest non-deprecated RE API version supported.
- build.bazel.semver.SemVer low_api_version = 4;
-
- // Latest RE API version supported.
- build.bazel.semver.SemVer high_api_version = 5;
-}
-
// The digest function used for converting values into keys for CAS and Action
// Cache.
message DigestFunction {


@ -11,6 +11,8 @@ da_haskell_binary(
"async",
"base",
"bytestring",
"conduit",
"conduit-extra",
"case-insensitive",
"containers",
"directory",
@ -23,17 +25,23 @@ da_haskell_binary(
"network-uri",
"optparse-applicative",
"process",
"proto3-suite",
"regex-tdfa",
"resourcet",
"safe",
"safe-exceptions",
"semver",
"split",
"stm",
"stm-chans",
"stm-conduit",
"text",
"time",
"unordered-containers",
"utf8-string",
"vector",
],
src_strip_prefix = "src",
visibility = ["//visibility:public"],
deps = [],
deps = ["//3rdparty/haskell:remote_apis"],
)

ci/cron/src/BazelCache.hs (new file)

@ -0,0 +1,161 @@
-- Copyright (c) 2021 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
-- SPDX-License-Identifier: Apache-2.0
-- | This module is a workaround for the `output not created` error we
-- see on Windows CI. We iterate over all AC entries in the cache,
-- look for broken entries with no output, and delete those. This
-- fixes the build for nodes that have only fetched the broken entry
-- from the cache. For other nodes, it looks like a `clean --expunge` is also
-- required (or a full node reset). See
-- https://github.com/tweag/rules_haskell/issues/1260 for more
-- information.
module BazelCache
( Opts(..)
, Delete(..)
, run
) where
import Build.Bazel.Remote.Execution.V2.RemoteExecution (ActionResult(..), Digest(..))
import Control.Concurrent.Async
import Control.Concurrent.STM
import Control.Concurrent.STM.TBMQueue
import Control.Monad
import Control.Monad.IO.Class
import Control.Monad.Trans.Resource
import qualified Data.ByteString.Lazy as BSL
import Data.Conduit ((.|))
import qualified Data.Conduit as Conduit
import qualified Data.Conduit.Combinators as Conduit
import qualified Data.Conduit.Process.Typed as Conduit
import qualified Data.Conduit.Text as Conduit
import qualified Data.Conduit.TQueue as Conduit
import Data.Maybe
import Data.Text (Text)
import qualified Data.Text as T
import Data.Time
import Data.Time.Format.ISO8601
import Network.HTTP.Client
import Network.HTTP.Client.TLS
import qualified Proto3.Suite as Proto3
import System.IO
data Opts = Opts
{ age :: NominalDiffTime
-- ^ Maximum age of entries that will be considered.
, cacheSuffix :: Maybe String
-- ^ Optional cache suffix to limit the search to.
, queueSize :: Int
-- ^ Size of the queue used to distribute work.
, concurrency :: Int
-- ^ Number of concurrent workers.
, delete :: Delete
-- ^ Whether invalid entries should be deleted.
}
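-- | Whether invalid entries should actually be deleted or only reported.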
newtype Delete = Delete Bool
run :: Opts -> IO ()
run Opts{..} = do
now <- getCurrentTime
let oldest = addUTCTime (- age) now
let procSpec =
Conduit.setStdout Conduit.createSource $
Conduit.proc "gsutil" ["list", "-l", gsCachePath cacheSuffix]
manager <- newManager tlsManagerSettings
runResourceT $ do
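-- Stream the `gsutil list -l` output into a bounded queue and let a fixed pool
-- of workers validate the listed AC entries concurrently.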
(reg, queue) <- allocate (newTBMQueueIO queueSize) (atomically . closeTBMQueue)
workers <- replicateM concurrency $ do
(_, worker) <- allocate (async (worker delete manager queue)) uninterruptibleCancel
pure worker
liftIO $ Conduit.withProcessWait procSpec $ \p -> do
let outConduit = Conduit.getStdout p
Conduit.runConduit
$ outConduit
.| Conduit.decode Conduit.utf8 .| Conduit.lines
.| Conduit.filter (not . isTotal)
.| Conduit.concatMapM (filterLine oldest)
.| Conduit.sinkTBMQueue queue
release reg
liftIO $ mapM_ wait workers
worker :: Delete -> Manager -> TBMQueue (UTCTime, Text) -> IO ()
worker delete manager queue = Conduit.runConduit
$ Conduit.sourceTBMQueue queue
.| Conduit.concatMapM (validateArtifact manager)
.| Conduit.mapM_ (handleInvalid delete)
-- | Handle an invalid entry.
handleInvalid :: Delete -> (UTCTime, Text, ActionResult) -> IO ()
handleInvalid (Delete delete) (time, path, r) = do
putStrLn $ "Found invalid AC at " <> show path <> " created at " <> show time <> ": " <> show r
when delete $ do
putStrLn $ "Deleting AC " <> show path
Conduit.runProcess_ $
Conduit.proc "gsutil" ["rm", "gs://daml-bazel-cache/" <> T.unpack path]
-- | Filter to lines that parse and are for entries that are not older
-- than the supplied age.
filterLine :: UTCTime -> T.Text -> IO (Maybe (UTCTime, T.Text))
filterLine oldest s = case parseLine s of
Nothing -> do
hPutStrLn stderr $
"ERROR: failed to parse " <> show s <> ", ignoring"
pure Nothing
Just (time, entry)
| time >= oldest -> pure (Just (time, entry))
| otherwise -> pure Nothing
-- | Download and validate the AC artifact at the given path.
-- Returns Nothing for valid artifacts and Just _ for a broken
-- artifact.
validateArtifact :: Manager -> (UTCTime, Text) -> IO (Maybe (UTCTime, Text, ActionResult))
validateArtifact manager (time, path) = do
req <- parseUrlThrow (cacheUrl (T.unpack path))
resp <- httpLbs req manager
let bs = responseBody resp
case Proto3.fromByteString (BSL.toStrict bs) of
Left err -> do
hPutStrLn stderr $ concat
[ "ERROR: malformed AC entry at"
, show path
, ":"
, show err
, ", ignoring"
]
pure Nothing
Right ac
| isInvalid ac -> pure (Just (time, path, ac))
| otherwise -> pure Nothing
where
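-- An entry counts as broken if it lists no output files or directories and its
-- stdout/stderr digests are absent or empty, i.e. the "output not created" case.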
isInvalid ActionResult{..} = and
[ null actionResultOutputFiles
, null actionResultOutputDirectories
, maybe True (\r -> digestSizeBytes r == 0) actionResultStdoutDigest
, maybe True (\r -> digestSizeBytes r == 0) actionResultStderrDigest
]
-- | Checks whether a line is the trailing "TOTAL:" summary in `gsutil -l`'s output.
isTotal :: Text -> Bool
isTotal = T.isPrefixOf "TOTAL: "
-- | Parse a single line in the output of `gsutil -l`
-- into the time and the cache path.
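-- A listing line looks roughly like
--   123  2021-01-28T12:00:00Z  gs://daml-bazel-cache/<suffix>/ac/<hash>
-- i.e. size, ISO8601 timestamp, and object URL.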
parseLine :: Text -> Maybe (UTCTime, Text)
parseLine t = do
[_, timeStr, name] <- pure (T.words t)
time <- iso8601ParseM (T.unpack timeStr)
path <- T.stripPrefix "gs://daml-bazel-cache/" name
pure (time, path)
gsCachePath :: Maybe String -> String
gsCachePath mbSuffix = "gs://daml-bazel-cache/" <> suffix <> "/ac/*"
-- Filtering to *-v* isn't strictly necessary, but it ensures that
-- we do not walk through the Linux cache which seems to
-- speed things up a bit.
where suffix = fromMaybe "*-v*" mbSuffix
cacheUrl :: String -> String
cacheUrl path = "https://bazel-cache.da-ext.net/" <> path
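For reference, a minimal sketch (not part of this commit; cleanHourly is a hypothetical helper) of the Opts that the hourly CI invocation `bazel-cache --age 90 --delete` corresponds to, using the CLI defaults for queue size and concurrency:

import qualified BazelCache

cleanHourly :: IO ()
cleanHourly = BazelCache.run BazelCache.Opts
  { BazelCache.age = 90 * 60                    -- 90 minutes; NominalDiffTime counts seconds
  , BazelCache.cacheSuffix = Nothing            -- gsCachePath then falls back to "*-v*"
  , BazelCache.queueSize = 128                  -- CLI default
  , BazelCache.concurrency = 32                 -- CLI default
  , BazelCache.delete = BazelCache.Delete True
  }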


@ -3,6 +3,8 @@
module Main (main) where
import qualified BazelCache
import Data.Function ((&))
import System.FilePath.Posix ((</>))
@ -425,11 +427,13 @@ data CliArgs = Docs
| Check { bash_lib :: String,
gcp_credentials :: Maybe String,
max_releases :: Maybe Int }
| BazelCache BazelCache.Opts
parser :: Opt.ParserInfo CliArgs
parser = info "This program is meant to be run by CI cron. You probably don't have sufficient access rights to run it locally."
(Opt.hsubparser (Opt.command "docs" docs
<> Opt.command "check" check))
<> Opt.command "check" check
<> Opt.command "bazel-cache" bazelCache))
where info t p = Opt.info (p Opt.<**> Opt.helper) (Opt.progDesc t)
docs = info "Build & push latest docs, if needed."
(pure Docs)
@ -445,6 +449,30 @@ parser = info "This program is meant to be run by CI cron. You probably don't ha
Opt.option Opt.auto (Opt.long "max-releases"
<> Opt.metavar "INT"
<> Opt.help "Max number of releases to check.")))
bazelCache =
info "Bazel cache debugging and fixing." $
fmap BazelCache $ BazelCache.Opts
<$> fmap (\m -> fromInteger (m * 60)) (Opt.option Opt.auto
(Opt.long "age" <>
Opt.help "Maximum age of entries that will be considered in minutes")
)
<*> Opt.optional
(Opt.strOption
(Opt.long "cache-suffix" <>
Opt.help "Cache suffix as set by ci/configure-bazel.sh"))
<*> Opt.option Opt.auto
(Opt.long "queue-size" <>
Opt.value 128 <>
Opt.help "Size of the queue used to distribute tasks among workers")
<*> Opt.option Opt.auto
(Opt.long "concurrency" <>
Opt.value 32 <>
Opt.help "Number of concurrent workers that validate AC entries")
<*> fmap BazelCache.Delete
(Opt.switch
(Opt.long "delete" <>
Opt.help "Whether invalid entries should be deleted or just displayed"))
main :: IO ()
main = do
@ -456,3 +484,4 @@ main = do
docs sdkDocOpts
docs damlOnSqlDocOpts
Check { bash_lib, gcp_credentials, max_releases } -> check_releases gcp_credentials bash_lib max_releases
BazelCache opts -> BazelCache.run opts


@ -225,13 +225,23 @@ def daml_deps():
urls = ["https://github.com/johnynek/bazel_jar_jar/archive/20dbf71f09b1c1c2a8575a42005a968b38805519.zip"], # Latest commit SHA as at 2019/02/13
)
if "com_github_googleapis_googleapis" not in native.existing_rules():
http_archive(
name = "com_github_googleapis_googleapis",
strip_prefix = "googleapis-6c48ab5aef47dc14e02e2dc718d232a28067129d",
urls = ["https://github.com/googleapis/googleapis/archive/6c48ab5aef47dc14e02e2dc718d232a28067129d.tar.gz"],
sha256 = "70d7be6ad49b4424313aad118c8622aab1c5fdd5a529d4215d3884ff89264a71",
)
if "com_github_googleapis_googleapis" not in native.existing_rules():
http_archive(
name = "com_github_googleapis_googleapis",
strip_prefix = "googleapis-6c48ab5aef47dc14e02e2dc718d232a28067129d",
urls = ["https://github.com/googleapis/googleapis/archive/6c48ab5aef47dc14e02e2dc718d232a28067129d.tar.gz"],
sha256 = "70d7be6ad49b4424313aad118c8622aab1c5fdd5a529d4215d3884ff89264a71",
)
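# remote-apis is patched to drop the gRPC service definitions; ci/cron only needs
# the ActionResult message (see bazel_tools/remote_apis_no_services.patch).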
if "com_github_bazelbuild_remote_apis" not in native.existing_rules():
http_archive(
name = "com_github_bazelbuild_remote_apis",
strip_prefix = "remote-apis-2.0.0",
urls = ["https://github.com/bazelbuild/remote-apis/archive/v2.0.0.tar.gz"],
sha256 = "79204ed1fa385c03b5235f65b25ced6ac51cf4b00e45e1157beca6a28bdb8043",
patches = ["@com_github_digital_asset_daml//:bazel_tools/remote_apis_no_services.patch"],
patch_args = ["-p1"],
)
# Buildifier.
# It is written in Go and hence needs rules_go to be available.


@ -61,6 +61,7 @@ genrule(
tools = [
"@proto3_suite//:compile-proto-file",
],
visibility = ["//visibility:public"],
)
google_rpc_src = "external/com_github_googleapis_googleapis"
@ -83,6 +84,7 @@ genrule(
tools = [
"@proto3_suite//:compile-proto-file",
],
visibility = ["//visibility:public"],
)
filegroup(


@ -35,6 +35,7 @@ packages:
- regex-tdfa-1.3.1.0
- shake-0.18.5
- simple-smt-0.9.4
- stm-conduit-4.0.1
# Core packages, need to be listed for integer-simple flags.
- integer-simple-0.1.1.1
- text-1.2.3.1


@ -1,6 +1,6 @@
{
"__GENERATED_FILE_DO_NOT_MODIFY_MANUALLY": 1042593609,
"all-cabal-hashes": "https://raw.githubusercontent.com/commercialhaskell/all-cabal-hashes/23a240fcce742ccccab1776df4d39ebb0d5ba2af",
"__GENERATED_FILE_DO_NOT_MODIFY_MANUALLY": 1690089775,
"all-cabal-hashes": "https://raw.githubusercontent.com/commercialhaskell/all-cabal-hashes/0523c09f594a382458057145e85f353ee92ffa31",
"resolved": {
"Cabal": {"dependencies":[],"location":{"type":"core"},"name":"Cabal","version":"3.2.1.0"},
"Decimal": {"dependencies":["base","deepseq"],"location":{"type":"hackage","url":"https://hackage.haskell.org/package/Decimal-0.5.1/Decimal-0.5.1.tar.gz"},"name":"Decimal","pinned":{"url":["https://hackage.haskell.org/package/Decimal-0.5.1/Decimal-0.5.1.tar.gz","https://s3.amazonaws.com/hackage.fpcomplete.com/package/Decimal-0.5.1.tar.gz"],"sha256":"575ca5c65a8ea5a5bf2cd7b794a0d16622082cb501bf4b0327c5895c0b80f34c","cabal-sha256":"06bdd0f8badfb2104fd052f64888c283792a08f9a0d3661339e133e073080299"},"version":"0.5.1"},
@ -46,6 +46,7 @@
"call-stack": {"dependencies":["base"],"location":{"type":"hackage","url":"https://hackage.haskell.org/package/call-stack-0.2.0/call-stack-0.2.0.tar.gz"},"name":"call-stack","pinned":{"url":["https://hackage.haskell.org/package/call-stack-0.2.0/call-stack-0.2.0.tar.gz","https://s3.amazonaws.com/hackage.fpcomplete.com/package/call-stack-0.2.0.tar.gz"],"sha256":"95c693c93958758d714dee22edc56d4ebb2dcefa4412264e941e32ccd6e4e0c7","cabal-sha256":"5ce796b78d5f964468ec6fe0717b4e7d0430817f37370c47b3e6b38e345b6643"},"version":"0.2.0"},
"case-insensitive": {"dependencies":["base","bytestring","deepseq","hashable","text"],"location":{"type":"hackage","url":"https://hackage.haskell.org/package/case-insensitive-1.2.1.0/case-insensitive-1.2.1.0.tar.gz"},"name":"case-insensitive","pinned":{"url":["https://hackage.haskell.org/package/case-insensitive-1.2.1.0/case-insensitive-1.2.1.0.tar.gz","https://s3.amazonaws.com/hackage.fpcomplete.com/package/case-insensitive-1.2.1.0.tar.gz"],"sha256":"296dc17e0c5f3dfb3d82ced83e4c9c44c338ecde749b278b6eae512f1d04e406","cabal-sha256":"9dfd3171fc7698cf8d931727d3af3a7b389135b583e46b5adac1f9d2026fff61"},"version":"1.2.1.0"},
"cereal": {"dependencies":["array","base","bytestring","containers","ghc-prim"],"location":{"type":"hackage","url":"https://hackage.haskell.org/package/cereal-0.5.8.1/cereal-0.5.8.1.tar.gz"},"name":"cereal","pinned":{"url":["https://hackage.haskell.org/package/cereal-0.5.8.1/cereal-0.5.8.1.tar.gz","https://s3.amazonaws.com/hackage.fpcomplete.com/package/cereal-0.5.8.1.tar.gz"],"sha256":"2d9e88ac934b9ebc058097c72011ff59f3f146176310e1c957a0e4cf63681bd7","cabal-sha256":"37cb7a78c84412e94592a658768320c41f015f2b8707a433de835afb8ebc18d7"},"version":"0.5.8.1"},
"cereal-conduit": {"dependencies":["base","bytestring","cereal","conduit","resourcet","transformers"],"location":{"type":"hackage","url":"https://hackage.haskell.org/package/cereal-conduit-0.8.0/cereal-conduit-0.8.0.tar.gz"},"name":"cereal-conduit","pinned":{"url":["https://hackage.haskell.org/package/cereal-conduit-0.8.0/cereal-conduit-0.8.0.tar.gz","https://s3.amazonaws.com/hackage.fpcomplete.com/package/cereal-conduit-0.8.0.tar.gz"],"sha256":"d95c4518a9984feacfd811c64be993705bff74c1f2daa00b4687bbb79f3a39eb","cabal-sha256":"ec62bcd3f5485dd7af4162483de165da9104fb29284b350d5b28da25dea0bec6"},"version":"0.8.0"},
"charset": {"dependencies":["array","base","bytestring","containers","semigroups","unordered-containers"],"location":{"type":"hackage","url":"https://hackage.haskell.org/package/charset-0.3.7.1/charset-0.3.7.1.tar.gz"},"name":"charset","pinned":{"url":["https://hackage.haskell.org/package/charset-0.3.7.1/charset-0.3.7.1.tar.gz","https://s3.amazonaws.com/hackage.fpcomplete.com/package/charset-0.3.7.1.tar.gz"],"sha256":"3d415d2883bd7bf0cc9f038e8323f19c71e07dd12a3c712f449ccb8b4daac0be","cabal-sha256":"3fc49b320dd636a476110262de6e49f541b6f9704781c91fb6629863951f5d00"},"version":"0.3.7.1"},
"clock": {"dependencies":["base"],"location":{"type":"hackage","url":"https://hackage.haskell.org/package/clock-0.8/clock-0.8.tar.gz"},"name":"clock","pinned":{"url":["https://hackage.haskell.org/package/clock-0.8/clock-0.8.tar.gz","https://s3.amazonaws.com/hackage.fpcomplete.com/package/clock-0.8.tar.gz"],"sha256":"08a35c5294009040f1e5eb721a21b60df7af6584092bb3d376ab1b2e57e26914","cabal-sha256":"b4ae207e2d3761450060a0d0feb873269233898039c76fceef9cc1a544067767"},"version":"0.8"},
"cmark-gfm": {"dependencies":["base","bytestring","text"],"location":{"type":"hackage","url":"https://hackage.haskell.org/package/cmark-gfm-0.2.2/cmark-gfm-0.2.2.tar.gz"},"name":"cmark-gfm","pinned":{"url":["https://hackage.haskell.org/package/cmark-gfm-0.2.2/cmark-gfm-0.2.2.tar.gz","https://s3.amazonaws.com/hackage.fpcomplete.com/package/cmark-gfm-0.2.2.tar.gz"],"sha256":"ee701aff393373f51c2653d7d23482aa7a000451cb2e38f307165616c36b7fea","cabal-sha256":"8d1b7f0447a4f64d25de4be2c2fb09993570158fd30407e3e24d89168be52331"},"version":"0.2.2"},
@ -123,7 +124,7 @@
"hscolour": {"dependencies":["base","containers"],"location":{"type":"hackage","url":"https://hackage.haskell.org/package/hscolour-1.24.4/hscolour-1.24.4.tar.gz"},"name":"hscolour","pinned":{"url":["https://hackage.haskell.org/package/hscolour-1.24.4/hscolour-1.24.4.tar.gz","https://s3.amazonaws.com/hackage.fpcomplete.com/package/hscolour-1.24.4.tar.gz"],"sha256":"243332b082294117f37b2c2c68079fa61af68b36223b3fc07594f245e0e5321d","cabal-sha256":"3a329fa0ea9138f651088f1fa25522aabeab0eb591932d3fd56c66736bbe78be"},"version":"1.24.4"},
"hslogger": {"dependencies":["base","bytestring","containers","deepseq","network","network-bsd","old-locale","time","unix"],"location":{"type":"hackage","url":"https://hackage.haskell.org/package/hslogger-1.3.1.0/hslogger-1.3.1.0.tar.gz"},"name":"hslogger","pinned":{"url":["https://hackage.haskell.org/package/hslogger-1.3.1.0/hslogger-1.3.1.0.tar.gz","https://s3.amazonaws.com/hackage.fpcomplete.com/package/hslogger-1.3.1.0.tar.gz"],"sha256":"7f2364f6c0b9c5b85a257267a335816126ef2471c817a42797a5d3c57acaca5b","cabal-sha256":"88655f6a7cc52529a7b3053b4ed304b250464b560c91092027bb66d62972a8bc"},"version":"1.3.1.0"},
"hspec": {"dependencies":["QuickCheck","base","hspec-core","hspec-discover","hspec-expectations"],"location":{"type":"hackage","url":"https://hackage.haskell.org/package/hspec-2.7.5/hspec-2.7.5.tar.gz"},"name":"hspec","pinned":{"url":["https://hackage.haskell.org/package/hspec-2.7.5/hspec-2.7.5.tar.gz","https://s3.amazonaws.com/hackage.fpcomplete.com/package/hspec-2.7.5.tar.gz"],"sha256":"bbbe7314a09a22ba8e006022987ee4e66027b6e5af6212b638ab405d5606cd37","cabal-sha256":"e13681f5539a171b30f06aeacfdb9001f601e694702bb69a7ee03bde9f983100"},"version":"2.7.5"},
"hspec-core": {"dependencies":["HUnit","QuickCheck","ansi-terminal","array","base","call-stack","clock","deepseq","directory","filepath","hspec-expectations","quickcheck-io","random","setenv","stm","tf-random","transformers"],"location":{"type":"hackage","url":"https://hackage.haskell.org/package/hspec-core-2.7.5/hspec-core-2.7.5.tar.gz"},"name":"hspec-core","pinned":{"url":["https://hackage.haskell.org/package/hspec-core-2.7.5/hspec-core-2.7.5.tar.gz","https://s3.amazonaws.com/hackage.fpcomplete.com/package/hspec-core-2.7.5.tar.gz"],"sha256":"f7042bc65bc06c61f1c3b317855b59821d2d5b24b386576050bedd4332f1f55d","cabal-sha256":"19f6cf8cbfc9720709f59508f2226de4818f0e06af53e7597f165eaff6f2578f"},"version":"2.7.5"},
"hspec-core": {"dependencies":["HUnit","QuickCheck","ansi-terminal","array","base","call-stack","clock","deepseq","directory","filepath","hspec-expectations","quickcheck-io","random","setenv","stm","tf-random","transformers"],"location":{"type":"hackage","url":"https://hackage.haskell.org/package/hspec-core-2.7.5/hspec-core-2.7.5.tar.gz"},"name":"hspec-core","pinned":{"url":["https://hackage.haskell.org/package/hspec-core-2.7.5/hspec-core-2.7.5.tar.gz","https://s3.amazonaws.com/hackage.fpcomplete.com/package/hspec-core-2.7.5.tar.gz"],"sha256":"f7042bc65bc06c61f1c3b317855b59821d2d5b24b386576050bedd4332f1f55d","cabal-sha256":"de5b935e6b7d98c248e1c05ea5fdd4ac31afc3e88806d5d7c04f9a5b7d0a3ff9"},"version":"2.7.5"},
"hspec-discover": {"dependencies":["base","directory","filepath"],"location":{"type":"hackage","url":"https://hackage.haskell.org/package/hspec-discover-2.7.5/hspec-discover-2.7.5.tar.gz"},"name":"hspec-discover","pinned":{"url":["https://hackage.haskell.org/package/hspec-discover-2.7.5/hspec-discover-2.7.5.tar.gz","https://s3.amazonaws.com/hackage.fpcomplete.com/package/hspec-discover-2.7.5.tar.gz"],"sha256":"aeeef9f7c19f4ff41c00e2c2a165f7d02defc0b6eb3ff4ecce4297f08c4f04b4","cabal-sha256":"b2cb9b71b654818ca1b394d169beba0a2b3d67722af620c820199e1821aaa2ee"},"version":"2.7.5"},
"hspec-expectations": {"dependencies":["HUnit","base","call-stack"],"location":{"type":"hackage","url":"https://hackage.haskell.org/package/hspec-expectations-0.8.2/hspec-expectations-0.8.2.tar.gz"},"name":"hspec-expectations","pinned":{"url":["https://hackage.haskell.org/package/hspec-expectations-0.8.2/hspec-expectations-0.8.2.tar.gz","https://s3.amazonaws.com/hackage.fpcomplete.com/package/hspec-expectations-0.8.2.tar.gz"],"sha256":"819607ea1faf35ce5be34be61c6f50f3389ea43892d56fb28c57a9f5d54fb4ef","cabal-sha256":"e2db24881baadc2d9d23b03cb629e80dcbda89a6b04ace9adb5f4d02ef8b31aa"},"version":"0.8.2"},
"http-client": {"dependencies":["array","base","blaze-builder","bytestring","case-insensitive","containers","cookie","deepseq","exceptions","filepath","ghc-prim","http-types","memory","mime-types","network","network-uri","random","stm","streaming-commons","text","time","transformers"],"location":{"type":"hackage","url":"https://hackage.haskell.org/package/http-client-0.6.4.1/http-client-0.6.4.1.tar.gz"},"name":"http-client","pinned":{"url":["https://hackage.haskell.org/package/http-client-0.6.4.1/http-client-0.6.4.1.tar.gz","https://s3.amazonaws.com/hackage.fpcomplete.com/package/http-client-0.6.4.1.tar.gz"],"sha256":"5742f36965c1030d7fb52b5fc67ccd45802f6f7e55eb7595df4eef6ea0eb22f8","cabal-sha256":"ea569e3ebc00e7bb68162fbdf85aa63e653b974eaff63df7b972e4296687e25c"},"version":"0.6.4.1"},
@ -230,6 +231,7 @@
"stache": {"dependencies":["aeson","base","bytestring","containers","deepseq","directory","filepath","gitrev","megaparsec","mtl","optparse-applicative","template-haskell","text","unordered-containers","vector","yaml"],"location":{"type":"hackage","url":"https://hackage.haskell.org/package/stache-2.2.0/stache-2.2.0.tar.gz"},"name":"stache","pinned":{"url":["https://hackage.haskell.org/package/stache-2.2.0/stache-2.2.0.tar.gz","https://s3.amazonaws.com/hackage.fpcomplete.com/package/stache-2.2.0.tar.gz"],"sha256":"6ffce1c3caa0b50eaf56bdd9a397eaf5a1b7d14a9b660b1c7e8908a121928e25","cabal-sha256":"7db7cc12275877b4f623a1abaa7f00f28ea9507cef8ace3113dfbaacf1b14df2"},"version":"2.2.0"},
"stm": {"dependencies":[],"location":{"type":"core"},"name":"stm","version":"2.5.0.0"},
"stm-chans": {"dependencies":["Cabal","base","stm"],"location":{"type":"hackage","url":"https://hackage.haskell.org/package/stm-chans-3.0.0.4/stm-chans-3.0.0.4.tar.gz"},"name":"stm-chans","pinned":{"url":["https://hackage.haskell.org/package/stm-chans-3.0.0.4/stm-chans-3.0.0.4.tar.gz","https://s3.amazonaws.com/hackage.fpcomplete.com/package/stm-chans-3.0.0.4.tar.gz"],"sha256":"2344fc5bfa33d565bad7b009fc0e2c5a7a595060ba149c661f44419fc0d54738","cabal-sha256":"e2eef3cea5251628f7b2ad22c24a1e5d08b83c19be4bd886ab0d7fed58ef2a6d"},"version":"3.0.0.4"},
"stm-conduit": {"dependencies":["async","base","cereal","cereal-conduit","conduit","conduit-extra","directory","exceptions","monad-loops","resourcet","stm","stm-chans","transformers","unliftio"],"location":{"type":"hackage","url":"https://hackage.haskell.org/package/stm-conduit-4.0.1/stm-conduit-4.0.1.tar.gz"},"name":"stm-conduit","pinned":{"url":["https://hackage.haskell.org/package/stm-conduit-4.0.1/stm-conduit-4.0.1.tar.gz","https://s3.amazonaws.com/hackage.fpcomplete.com/package/stm-conduit-4.0.1.tar.gz"],"sha256":"e80e5be72a4564fa45e1e27f91c0984e12d2a736d0ceb9594350d573efee1442","cabal-sha256":"71f477c617400fb5bee74557416d7dac17dccea0663ec29bf9ca67b52703637d"},"version":"4.0.1"},
"streaming-commons": {"dependencies":["array","async","base","bytestring","directory","network","process","random","stm","text","transformers","unix","zlib"],"location":{"type":"hackage","url":"https://hackage.haskell.org/package/streaming-commons-0.2.2.1/streaming-commons-0.2.2.1.tar.gz"},"name":"streaming-commons","pinned":{"url":["https://hackage.haskell.org/package/streaming-commons-0.2.2.1/streaming-commons-0.2.2.1.tar.gz","https://s3.amazonaws.com/hackage.fpcomplete.com/package/streaming-commons-0.2.2.1.tar.gz"],"sha256":"306940bf4878a0b714e6746a7f934d018100efc86332c176a648014bfe1e81dd","cabal-sha256":"28abce35b48dcfb871926dad4cb37bdf737372892b4e5222abc97ca31f2ac738"},"version":"0.2.2.1"},
"strict": {"dependencies":["assoc","base","binary","bytestring","deepseq","ghc-prim","hashable","text","these","transformers"],"location":{"type":"hackage","url":"https://hackage.haskell.org/package/strict-0.4.0.1/strict-0.4.0.1.tar.gz"},"name":"strict","pinned":{"url":["https://hackage.haskell.org/package/strict-0.4.0.1/strict-0.4.0.1.tar.gz","https://s3.amazonaws.com/hackage.fpcomplete.com/package/strict-0.4.0.1.tar.gz"],"sha256":"dff6abc08ad637e51891bb8b475778c40926c51219eda60fd64f0d9680226241","cabal-sha256":"08cf72ad570fddfe3b3424117bf20a303a1fb21047b40c1d6c8004c0e3e02a0b"},"version":"0.4.0.1"},
"swagger2": {"dependencies":["Cabal","QuickCheck","aeson","aeson-pretty","base","base-compat-batteries","bytestring","cabal-doctest","containers","cookie","generics-sop","hashable","http-media","insert-ordered-containers","lens","mtl","network","optics-core","optics-th","scientific","template-haskell","text","time","transformers","transformers-compat","unordered-containers","uuid-types","vector"],"location":{"type":"hackage","url":"https://hackage.haskell.org/package/swagger2-2.6/swagger2-2.6.tar.gz"},"name":"swagger2","pinned":{"url":["https://hackage.haskell.org/package/swagger2-2.6/swagger2-2.6.tar.gz","https://s3.amazonaws.com/hackage.fpcomplete.com/package/swagger2-2.6.tar.gz"],"sha256":"682afe3b43d6b7c394cab330bb48692b8045dff8db3e8913bbfabee0fa8c706e","cabal-sha256":"12c0752b397da71e286a2f8998dfd842376df56f624deb0ad8d858c6e20cb8bd"},"version":"2.6"},


@ -1,6 +1,6 @@
{
"__GENERATED_FILE_DO_NOT_MODIFY_MANUALLY": 1623318854,
"all-cabal-hashes": "https://raw.githubusercontent.com/commercialhaskell/all-cabal-hashes/23a240fcce742ccccab1776df4d39ebb0d5ba2af",
"__GENERATED_FILE_DO_NOT_MODIFY_MANUALLY": 1621880175,
"all-cabal-hashes": "https://raw.githubusercontent.com/commercialhaskell/all-cabal-hashes/49215024d42af6a1bc3d7c50b860dd4705da63ff",
"resolved": {
"Cabal": {"dependencies":[],"location":{"type":"core"},"name":"Cabal","version":"3.2.1.0"},
"Decimal": {"dependencies":["base","deepseq"],"location":{"type":"hackage","url":"https://hackage.haskell.org/package/Decimal-0.5.1/Decimal-0.5.1.tar.gz"},"name":"Decimal","pinned":{"url":["https://hackage.haskell.org/package/Decimal-0.5.1/Decimal-0.5.1.tar.gz","https://s3.amazonaws.com/hackage.fpcomplete.com/package/Decimal-0.5.1.tar.gz"],"sha256":"575ca5c65a8ea5a5bf2cd7b794a0d16622082cb501bf4b0327c5895c0b80f34c","cabal-sha256":"06bdd0f8badfb2104fd052f64888c283792a08f9a0d3661339e133e073080299"},"version":"0.5.1"},
@ -47,6 +47,7 @@
"call-stack": {"dependencies":["base"],"location":{"type":"hackage","url":"https://hackage.haskell.org/package/call-stack-0.2.0/call-stack-0.2.0.tar.gz"},"name":"call-stack","pinned":{"url":["https://hackage.haskell.org/package/call-stack-0.2.0/call-stack-0.2.0.tar.gz","https://s3.amazonaws.com/hackage.fpcomplete.com/package/call-stack-0.2.0.tar.gz"],"sha256":"95c693c93958758d714dee22edc56d4ebb2dcefa4412264e941e32ccd6e4e0c7","cabal-sha256":"5ce796b78d5f964468ec6fe0717b4e7d0430817f37370c47b3e6b38e345b6643"},"version":"0.2.0"},
"case-insensitive": {"dependencies":["base","bytestring","deepseq","hashable","text"],"location":{"type":"hackage","url":"https://hackage.haskell.org/package/case-insensitive-1.2.1.0/case-insensitive-1.2.1.0.tar.gz"},"name":"case-insensitive","pinned":{"url":["https://hackage.haskell.org/package/case-insensitive-1.2.1.0/case-insensitive-1.2.1.0.tar.gz","https://s3.amazonaws.com/hackage.fpcomplete.com/package/case-insensitive-1.2.1.0.tar.gz"],"sha256":"296dc17e0c5f3dfb3d82ced83e4c9c44c338ecde749b278b6eae512f1d04e406","cabal-sha256":"9dfd3171fc7698cf8d931727d3af3a7b389135b583e46b5adac1f9d2026fff61"},"version":"1.2.1.0"},
"cereal": {"dependencies":["array","base","bytestring","containers","ghc-prim"],"location":{"type":"hackage","url":"https://hackage.haskell.org/package/cereal-0.5.8.1/cereal-0.5.8.1.tar.gz"},"name":"cereal","pinned":{"url":["https://hackage.haskell.org/package/cereal-0.5.8.1/cereal-0.5.8.1.tar.gz","https://s3.amazonaws.com/hackage.fpcomplete.com/package/cereal-0.5.8.1.tar.gz"],"sha256":"2d9e88ac934b9ebc058097c72011ff59f3f146176310e1c957a0e4cf63681bd7","cabal-sha256":"37cb7a78c84412e94592a658768320c41f015f2b8707a433de835afb8ebc18d7"},"version":"0.5.8.1"},
"cereal-conduit": {"dependencies":["base","bytestring","cereal","conduit","resourcet","transformers"],"location":{"type":"hackage","url":"https://hackage.haskell.org/package/cereal-conduit-0.8.0/cereal-conduit-0.8.0.tar.gz"},"name":"cereal-conduit","pinned":{"url":["https://hackage.haskell.org/package/cereal-conduit-0.8.0/cereal-conduit-0.8.0.tar.gz","https://s3.amazonaws.com/hackage.fpcomplete.com/package/cereal-conduit-0.8.0.tar.gz"],"sha256":"d95c4518a9984feacfd811c64be993705bff74c1f2daa00b4687bbb79f3a39eb","cabal-sha256":"ec62bcd3f5485dd7af4162483de165da9104fb29284b350d5b28da25dea0bec6"},"version":"0.8.0"},
"charset": {"dependencies":["array","base","bytestring","containers","semigroups","unordered-containers"],"location":{"type":"hackage","url":"https://hackage.haskell.org/package/charset-0.3.7.1/charset-0.3.7.1.tar.gz"},"name":"charset","pinned":{"url":["https://hackage.haskell.org/package/charset-0.3.7.1/charset-0.3.7.1.tar.gz","https://s3.amazonaws.com/hackage.fpcomplete.com/package/charset-0.3.7.1.tar.gz"],"sha256":"3d415d2883bd7bf0cc9f038e8323f19c71e07dd12a3c712f449ccb8b4daac0be","cabal-sha256":"3fc49b320dd636a476110262de6e49f541b6f9704781c91fb6629863951f5d00"},"version":"0.3.7.1"},
"clock": {"dependencies":["base"],"location":{"type":"hackage","url":"https://hackage.haskell.org/package/clock-0.8/clock-0.8.tar.gz"},"name":"clock","pinned":{"url":["https://hackage.haskell.org/package/clock-0.8/clock-0.8.tar.gz","https://s3.amazonaws.com/hackage.fpcomplete.com/package/clock-0.8.tar.gz"],"sha256":"08a35c5294009040f1e5eb721a21b60df7af6584092bb3d376ab1b2e57e26914","cabal-sha256":"b4ae207e2d3761450060a0d0feb873269233898039c76fceef9cc1a544067767"},"version":"0.8"},
"cmark-gfm": {"dependencies":["base","bytestring","text"],"location":{"type":"hackage","url":"https://hackage.haskell.org/package/cmark-gfm-0.2.2/cmark-gfm-0.2.2.tar.gz"},"name":"cmark-gfm","pinned":{"url":["https://hackage.haskell.org/package/cmark-gfm-0.2.2/cmark-gfm-0.2.2.tar.gz","https://s3.amazonaws.com/hackage.fpcomplete.com/package/cmark-gfm-0.2.2.tar.gz"],"sha256":"ee701aff393373f51c2653d7d23482aa7a000451cb2e38f307165616c36b7fea","cabal-sha256":"8d1b7f0447a4f64d25de4be2c2fb09993570158fd30407e3e24d89168be52331"},"version":"0.2.2"},
@ -124,7 +125,7 @@
"hscolour": {"dependencies":["base","containers"],"location":{"type":"hackage","url":"https://hackage.haskell.org/package/hscolour-1.24.4/hscolour-1.24.4.tar.gz"},"name":"hscolour","pinned":{"url":["https://hackage.haskell.org/package/hscolour-1.24.4/hscolour-1.24.4.tar.gz","https://s3.amazonaws.com/hackage.fpcomplete.com/package/hscolour-1.24.4.tar.gz"],"sha256":"243332b082294117f37b2c2c68079fa61af68b36223b3fc07594f245e0e5321d","cabal-sha256":"3a329fa0ea9138f651088f1fa25522aabeab0eb591932d3fd56c66736bbe78be"},"version":"1.24.4"},
"hslogger": {"dependencies":["base","bytestring","containers","deepseq","network","network-bsd","old-locale","time"],"location":{"type":"hackage","url":"https://hackage.haskell.org/package/hslogger-1.3.1.0/hslogger-1.3.1.0.tar.gz"},"name":"hslogger","pinned":{"url":["https://hackage.haskell.org/package/hslogger-1.3.1.0/hslogger-1.3.1.0.tar.gz","https://s3.amazonaws.com/hackage.fpcomplete.com/package/hslogger-1.3.1.0.tar.gz"],"sha256":"7f2364f6c0b9c5b85a257267a335816126ef2471c817a42797a5d3c57acaca5b","cabal-sha256":"88655f6a7cc52529a7b3053b4ed304b250464b560c91092027bb66d62972a8bc"},"version":"1.3.1.0"},
"hspec": {"dependencies":["QuickCheck","base","hspec-core","hspec-discover","hspec-expectations"],"location":{"type":"hackage","url":"https://hackage.haskell.org/package/hspec-2.7.5/hspec-2.7.5.tar.gz"},"name":"hspec","pinned":{"url":["https://hackage.haskell.org/package/hspec-2.7.5/hspec-2.7.5.tar.gz","https://s3.amazonaws.com/hackage.fpcomplete.com/package/hspec-2.7.5.tar.gz"],"sha256":"bbbe7314a09a22ba8e006022987ee4e66027b6e5af6212b638ab405d5606cd37","cabal-sha256":"e13681f5539a171b30f06aeacfdb9001f601e694702bb69a7ee03bde9f983100"},"version":"2.7.5"},
"hspec-core": {"dependencies":["HUnit","QuickCheck","ansi-terminal","array","base","call-stack","clock","deepseq","directory","filepath","hspec-expectations","quickcheck-io","random","setenv","stm","tf-random","transformers"],"location":{"type":"hackage","url":"https://hackage.haskell.org/package/hspec-core-2.7.5/hspec-core-2.7.5.tar.gz"},"name":"hspec-core","pinned":{"url":["https://hackage.haskell.org/package/hspec-core-2.7.5/hspec-core-2.7.5.tar.gz","https://s3.amazonaws.com/hackage.fpcomplete.com/package/hspec-core-2.7.5.tar.gz"],"sha256":"f7042bc65bc06c61f1c3b317855b59821d2d5b24b386576050bedd4332f1f55d","cabal-sha256":"19f6cf8cbfc9720709f59508f2226de4818f0e06af53e7597f165eaff6f2578f"},"version":"2.7.5"},
"hspec-core": {"dependencies":["HUnit","QuickCheck","ansi-terminal","array","base","call-stack","clock","deepseq","directory","filepath","hspec-expectations","quickcheck-io","random","setenv","stm","tf-random","transformers"],"location":{"type":"hackage","url":"https://hackage.haskell.org/package/hspec-core-2.7.5/hspec-core-2.7.5.tar.gz"},"name":"hspec-core","pinned":{"url":["https://hackage.haskell.org/package/hspec-core-2.7.5/hspec-core-2.7.5.tar.gz","https://s3.amazonaws.com/hackage.fpcomplete.com/package/hspec-core-2.7.5.tar.gz"],"sha256":"f7042bc65bc06c61f1c3b317855b59821d2d5b24b386576050bedd4332f1f55d","cabal-sha256":"de5b935e6b7d98c248e1c05ea5fdd4ac31afc3e88806d5d7c04f9a5b7d0a3ff9"},"version":"2.7.5"},
"hspec-discover": {"dependencies":["base","directory","filepath"],"location":{"type":"hackage","url":"https://hackage.haskell.org/package/hspec-discover-2.7.5/hspec-discover-2.7.5.tar.gz"},"name":"hspec-discover","pinned":{"url":["https://hackage.haskell.org/package/hspec-discover-2.7.5/hspec-discover-2.7.5.tar.gz","https://s3.amazonaws.com/hackage.fpcomplete.com/package/hspec-discover-2.7.5.tar.gz"],"sha256":"aeeef9f7c19f4ff41c00e2c2a165f7d02defc0b6eb3ff4ecce4297f08c4f04b4","cabal-sha256":"b2cb9b71b654818ca1b394d169beba0a2b3d67722af620c820199e1821aaa2ee"},"version":"2.7.5"},
"hspec-expectations": {"dependencies":["HUnit","base","call-stack"],"location":{"type":"hackage","url":"https://hackage.haskell.org/package/hspec-expectations-0.8.2/hspec-expectations-0.8.2.tar.gz"},"name":"hspec-expectations","pinned":{"url":["https://hackage.haskell.org/package/hspec-expectations-0.8.2/hspec-expectations-0.8.2.tar.gz","https://s3.amazonaws.com/hackage.fpcomplete.com/package/hspec-expectations-0.8.2.tar.gz"],"sha256":"819607ea1faf35ce5be34be61c6f50f3389ea43892d56fb28c57a9f5d54fb4ef","cabal-sha256":"e2db24881baadc2d9d23b03cb629e80dcbda89a6b04ace9adb5f4d02ef8b31aa"},"version":"0.8.2"},
"http-client": {"dependencies":["Win32","array","base","blaze-builder","bytestring","case-insensitive","containers","cookie","deepseq","exceptions","filepath","ghc-prim","http-types","memory","mime-types","network","network-uri","random","safe","stm","streaming-commons","text","time","transformers"],"location":{"type":"hackage","url":"https://hackage.haskell.org/package/http-client-0.6.4.1/http-client-0.6.4.1.tar.gz"},"name":"http-client","pinned":{"url":["https://hackage.haskell.org/package/http-client-0.6.4.1/http-client-0.6.4.1.tar.gz","https://s3.amazonaws.com/hackage.fpcomplete.com/package/http-client-0.6.4.1.tar.gz"],"sha256":"5742f36965c1030d7fb52b5fc67ccd45802f6f7e55eb7595df4eef6ea0eb22f8","cabal-sha256":"ea569e3ebc00e7bb68162fbdf85aa63e653b974eaff63df7b972e4296687e25c"},"version":"0.6.4.1"},
@ -232,6 +233,7 @@
"stache": {"dependencies":["aeson","base","bytestring","containers","deepseq","directory","filepath","gitrev","megaparsec","mtl","optparse-applicative","template-haskell","text","unordered-containers","vector","yaml"],"location":{"type":"hackage","url":"https://hackage.haskell.org/package/stache-2.2.0/stache-2.2.0.tar.gz"},"name":"stache","pinned":{"url":["https://hackage.haskell.org/package/stache-2.2.0/stache-2.2.0.tar.gz","https://s3.amazonaws.com/hackage.fpcomplete.com/package/stache-2.2.0.tar.gz"],"sha256":"6ffce1c3caa0b50eaf56bdd9a397eaf5a1b7d14a9b660b1c7e8908a121928e25","cabal-sha256":"7db7cc12275877b4f623a1abaa7f00f28ea9507cef8ace3113dfbaacf1b14df2"},"version":"2.2.0"},
"stm": {"dependencies":[],"location":{"type":"core"},"name":"stm","version":"2.5.0.0"},
"stm-chans": {"dependencies":["Cabal","base","stm"],"location":{"type":"hackage","url":"https://hackage.haskell.org/package/stm-chans-3.0.0.4/stm-chans-3.0.0.4.tar.gz"},"name":"stm-chans","pinned":{"url":["https://hackage.haskell.org/package/stm-chans-3.0.0.4/stm-chans-3.0.0.4.tar.gz","https://s3.amazonaws.com/hackage.fpcomplete.com/package/stm-chans-3.0.0.4.tar.gz"],"sha256":"2344fc5bfa33d565bad7b009fc0e2c5a7a595060ba149c661f44419fc0d54738","cabal-sha256":"e2eef3cea5251628f7b2ad22c24a1e5d08b83c19be4bd886ab0d7fed58ef2a6d"},"version":"3.0.0.4"},
"stm-conduit": {"dependencies":["async","base","cereal","cereal-conduit","conduit","conduit-extra","directory","exceptions","monad-loops","resourcet","stm","stm-chans","transformers","unliftio"],"location":{"type":"hackage","url":"https://hackage.haskell.org/package/stm-conduit-4.0.1/stm-conduit-4.0.1.tar.gz"},"name":"stm-conduit","pinned":{"url":["https://hackage.haskell.org/package/stm-conduit-4.0.1/stm-conduit-4.0.1.tar.gz","https://s3.amazonaws.com/hackage.fpcomplete.com/package/stm-conduit-4.0.1.tar.gz"],"sha256":"e80e5be72a4564fa45e1e27f91c0984e12d2a736d0ceb9594350d573efee1442","cabal-sha256":"71f477c617400fb5bee74557416d7dac17dccea0663ec29bf9ca67b52703637d"},"version":"4.0.1"},
"streaming-commons": {"dependencies":["Win32","array","async","base","bytestring","directory","filepath","network","process","random","stm","text","transformers","zlib"],"location":{"type":"hackage","url":"https://hackage.haskell.org/package/streaming-commons-0.2.2.1/streaming-commons-0.2.2.1.tar.gz"},"name":"streaming-commons","pinned":{"url":["https://hackage.haskell.org/package/streaming-commons-0.2.2.1/streaming-commons-0.2.2.1.tar.gz","https://s3.amazonaws.com/hackage.fpcomplete.com/package/streaming-commons-0.2.2.1.tar.gz"],"sha256":"306940bf4878a0b714e6746a7f934d018100efc86332c176a648014bfe1e81dd","cabal-sha256":"28abce35b48dcfb871926dad4cb37bdf737372892b4e5222abc97ca31f2ac738"},"version":"0.2.2.1"},
"strict": {"dependencies":["assoc","base","binary","bytestring","deepseq","ghc-prim","hashable","text","these","transformers"],"location":{"type":"hackage","url":"https://hackage.haskell.org/package/strict-0.4.0.1/strict-0.4.0.1.tar.gz"},"name":"strict","pinned":{"url":["https://hackage.haskell.org/package/strict-0.4.0.1/strict-0.4.0.1.tar.gz","https://s3.amazonaws.com/hackage.fpcomplete.com/package/strict-0.4.0.1.tar.gz"],"sha256":"dff6abc08ad637e51891bb8b475778c40926c51219eda60fd64f0d9680226241","cabal-sha256":"08cf72ad570fddfe3b3424117bf20a303a1fb21047b40c1d6c8004c0e3e02a0b"},"version":"0.4.0.1"},
"swagger2": {"dependencies":["Cabal","QuickCheck","aeson","aeson-pretty","base","base-compat-batteries","bytestring","cabal-doctest","containers","cookie","generics-sop","hashable","http-media","insert-ordered-containers","lens","mtl","network","optics-core","optics-th","scientific","template-haskell","text","time","transformers","transformers-compat","unordered-containers","uuid-types","vector"],"location":{"type":"hackage","url":"https://hackage.haskell.org/package/swagger2-2.6/swagger2-2.6.tar.gz"},"name":"swagger2","pinned":{"url":["https://hackage.haskell.org/package/swagger2-2.6/swagger2-2.6.tar.gz","https://s3.amazonaws.com/hackage.fpcomplete.com/package/swagger2-2.6.tar.gz"],"sha256":"682afe3b43d6b7c394cab330bb48692b8045dff8db3e8913bbfabee0fa8c706e","cabal-sha256":"12c0752b397da71e286a2f8998dfd842376df56f624deb0ad8d858c6e20cb8bd"},"version":"2.6"},