# daml/WORKSPACE — Bazel workspace definition for the daml repository.
# Root Bazel workspace. The workspace name is the canonical repository label
# (@com_github_digital_asset_daml) used by downstream Bazel projects.
workspace(
    name = "com_github_digital_asset_daml",
)
# NOTE(JM): Load external dependencies from deps.bzl.
# Do not put "http_archive" and similar rules into this file. Put them into
# deps.bzl. This allows using this repository as an external workspace
# (though with the caveat that the user needs to repeat the relevant bits of
# magic in this file, but at least the right versions of external rules are
# picked).
load("//:deps.bzl", "daml_deps")
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive", "http_file")

# All http_archive-style external dependencies live in deps.bzl (see the
# note at the top of this file).
daml_deps()

load("@rules_haskell//haskell:repositories.bzl", "rules_haskell_dependencies")
load("@com_github_bazelbuild_remote_apis//:repository_rules.bzl", "switched_rules_by_language")

# Instantiate the remote-apis import stub; no per-language bindings are
# enabled here.
switched_rules_by_language(
    name = "bazel_remote_apis_imports",
)

rules_haskell_dependencies()

# NOTE: protobuf deps must be loaded after the Haskell deps to avoid pulling
# in an older version of rules_cc (see the grpc/netty upgrade history).
load("@com_google_protobuf//:protobuf_deps.bzl", "protobuf_deps")

protobuf_deps()

load("@rules_pkg//:deps.bzl", "rules_pkg_dependencies")

rules_pkg_dependencies()
# Register the c2hs toolchain used when compiling Haskell FFI bindings.
register_toolchains(
    "//:c2hs-toolchain",
)
load("//bazel_tools/dev_env_tool:dev_env_tool.bzl", "dadew", "dev_env_tool")
load(
    "@io_tweag_rules_nixpkgs//nixpkgs:nixpkgs.bzl",
    "nixpkgs_cc_configure",
    "nixpkgs_local_repository",
    "nixpkgs_package",
    "nixpkgs_python_configure",
)
load("//bazel_tools:create_workspace.bzl", "create_workspace")
load("//bazel_tools:os_info.bzl", "os_info")

# Detect the host OS; later rules branch on is_linux/is_darwin/is_windows
# exported by this repository.
os_info(name = "os_info")
# (Removed: commit-message text for "introduce new release process" (#4513)
# accidentally captured from the web history view; not part of this file.)
load("//bazel_tools:build_environment.bzl", "build_environment")

# Repository rule that picks up the SDK version to build from the
# environment, defaulting to 0.0.0 for local development builds
# (introduced with the LATEST-file release process, #4513).
build_environment(name = "build_environment")

load("//bazel_tools:oracle.bzl", "oracle_configure")

# Oracle database test configuration repository.
oracle_configure(name = "oracle")

load("//bazel_tools:scala_version.bzl", "scala_version_configure")

# Resolves the Scala version and exposes it via @scala_version//:index.bzl.
scala_version_configure(name = "scala_version")
load(
    "@scala_version//:index.bzl",
    "scala_artifacts",
    "scala_major_version",
    "scala_major_version_suffix",
    "scala_version",
)

# dadew manages the Windows dev-env tools (see the msys2 configuration below).
dadew(name = "dadew")

load("@os_info//:os_info.bzl", "is_darwin", "is_linux", "is_windows")
load("//bazel_tools:ghc_dwarf.bzl", "ghc_dwarf")

# Repository rule exposing whether GHC should emit DWARF debug info.
ghc_dwarf(name = "ghc_dwarf")

load("@ghc_dwarf//:ghc_dwarf.bzl", "enable_ghc_dwarf")

# Configure msys2 POSIX toolchain provided by dadew.
load("//bazel_tools/dev_env_tool:dev_env_tool.bzl", "dadew_sh_posix_configure")

dadew_sh_posix_configure() if is_windows else None
# Pinned nixpkgs snapshot used by all nixpkgs_* rules in this file.
nixpkgs_local_repository(
    name = "nixpkgs",
    nix_file = "//nix:nixpkgs.nix",
    nix_file_deps = [
        "//nix:nixpkgs/default.nix",
        "//nix:nixpkgs/default.src.json",
        "//nix:system.nix",
    ],
)

# Nix repository mapping shared by the nixpkgs_* rules below.
dev_env_nix_repos = {
    "nixpkgs": "@nixpkgs",
}

load("//bazel_tools:damlc_legacy.bzl", "damlc_legacy")

# Pinned legacy damlc binary, fetched per platform and verified by sha256.
damlc_legacy(
    name = "damlc_legacy",
    sha256 = {
        "linux": "dd1c7f2d34f3eac631c7edc1637c9b3e93c341561d41828b4f0d8e897effa90f",
        "windows": "f458b8d2612887915372aad61766120e34c0fdc6a65eb37cdb1a8efc58e14de3",
        "macos": "63141d7168e883c0b8c212dca6198f5463f82aa82bbbc51d8805ce7e474300e4",
    },
    version = "1.18.0-snapshot.20211117.8399.0.a05a40ae",
)
# Bazel cannot automatically determine which files a Nix target depends on.
# rules_nixpkgs offers the nix_file_deps attribute for that purpose. It should
# list all files that a target depends on. This allows Bazel to rebuild the
# target using Nix if any of these files has been changed. Omitting files from
# this list can cause subtle bugs or cache misses when Bazel loads an outdated
# store path. You can use the following command to determine what files a Nix
# target depends on. E.g. for tools.curl
#
#   $ nix-build -vv -A tools.curl nix 2>&1 \
#     | egrep '(evaluating file|copied source)' \
#     | egrep -v '/nix/store'
#
# Unfortunately there is no mechanism to automatically keep this list up to
# date at the moment. See https://github.com/tweag/rules_nixpkgs/issues/74.
# Nix source files shared by most nixpkgs_* targets below; see the note
# above about keeping nix_file_deps complete.
common_nix_file_deps = [
    "//nix:bazel.nix",
    "//nix:nixpkgs.nix",
    "//nix:system.nix",
    "//nix:nixpkgs/default.nix",
    "//nix:nixpkgs/default.src.json",
]

# Use Nix provisioned cc toolchain (Windows uses its own toolchain instead).
nixpkgs_cc_configure(
    nix_file = "//nix:bazel-cc-toolchain.nix",
    nix_file_deps = common_nix_file_deps + [
        "//nix:tools/bazel-cc-toolchain/default.nix",
    ],
    repositories = dev_env_nix_repos,
) if not is_windows else None
# Use the Nix-provisioned Python toolchain on non-Windows platforms.
nixpkgs_python_configure(repository = "@nixpkgs") if not is_windows else None

# Curl system dependency
nixpkgs_package(
    name = "curl_nix",
    attribute_path = "curl",
    nix_file = "//nix:bazel.nix",
    nix_file_deps = common_nix_file_deps,
    repositories = dev_env_nix_repos,
)
# (Removed: commit-message text for "FreePort draw from outside ephemeral
# port range" (#10774) accidentally captured from the web history view.)
# Sysctl system dependency (used for port-range detection on macOS).
nixpkgs_package(
    name = "sysctl_nix",
    attribute_path = "sysctl",
    nix_file = "//nix:bazel.nix",
    nix_file_deps = common_nix_file_deps,
    repositories = dev_env_nix_repos,
)

# Toxiproxy dependency
nixpkgs_package(
    name = "toxiproxy_nix",
    attribute_path = "toxiproxy",
    # Do not fail on platforms where Nix is unsupported (e.g. Windows);
    # the Windows binary is provided via dev_env_tool below.
    fail_not_supported = False,
    nix_file = "//nix:bazel.nix",
    nix_file_deps = common_nix_file_deps,
    repositories = dev_env_nix_repos,
)

# Expose toxiproxy from Nix on Linux/macOS and from the dev-env on Windows.
dev_env_tool(
    name = "toxiproxy_dev_env",
    nix_include = ["bin/toxiproxy-cmd"],
    nix_label = "@toxiproxy_nix",
    nix_paths = ["bin/toxiproxy-cmd"],
    tools = ["toxiproxy"],
    win_include = ["toxiproxy-server-windows-amd64.exe"],
    win_paths = ["toxiproxy-server-windows-amd64.exe"],
    win_tool = "toxiproxy",
)
2019-04-04 11:33:38 +03:00
# Patchelf system dependency
nixpkgs_package(
name = "patchelf_nix",
attribute_path = "patchelf",
2019-04-04 11:33:38 +03:00
nix_file = "//nix:bazel.nix",
nix_file_deps = common_nix_file_deps,
repositories = dev_env_nix_repos,
)
# netcat dependency
nixpkgs_package(
name = "netcat_nix",
attribute_path = "netcat-gnu",
nix_file = "//nix:bazel.nix",
nix_file_deps = common_nix_file_deps,
repositories = dev_env_nix_repos,
)
dev_env_tool(
name = "netcat_dev_env",
nix_include = ["bin/nc"],
nix_label = "@netcat_nix",
nix_paths = ["bin/nc"],
tools = ["nc"],
win_include = ["usr/bin/nc.exe"],
win_paths = ["usr/bin/nc.exe"],
win_tool = "msys2",
)
nixpkgs_package(
name = "openssl_nix",
attribute_path = "openssl",
fail_not_supported = False,
nix_file = "//nix:bazel.nix",
nix_file_deps = common_nix_file_deps,
repositories = dev_env_nix_repos,
)
dev_env_tool(
name = "openssl_dev_env",
nix_include = ["bin/openssl"],
nix_label = "@openssl_nix",
nix_paths = ["bin/openssl"],
tools = ["openssl"],
win_include = [
"usr/bin",
"usr/ssl",
],
win_paths = ["usr/bin/openssl.exe"],
win_tool = "msys2",
)
# Tar & gzip dependency
nixpkgs_package(
    name = "tar_nix",
    attribute_path = "gnutar",
    fail_not_supported = False,
    nix_file = "//nix:bazel.nix",
    nix_file_deps = common_nix_file_deps,
    repositories = dev_env_nix_repos,
)

dev_env_tool(
    name = "tar_dev_env",
    nix_include = ["bin/tar"],
    nix_label = "@tar_nix",
    nix_paths = ["bin/tar"],
    tools = ["tar"],
    win_include = ["usr/bin/tar.exe"],
    win_paths = ["usr/bin/tar.exe"],
    win_tool = "msys2",
)

nixpkgs_package(
    name = "gzip_nix",
    attribute_path = "gzip",
    fail_not_supported = False,
    nix_file = "//nix:bazel.nix",
    nix_file_deps = common_nix_file_deps,
    repositories = dev_env_nix_repos,
)

dev_env_tool(
    name = "gzip_dev_env",
    nix_include = ["bin/gzip"],
    nix_label = "@gzip_nix",
    nix_paths = ["bin/gzip"],
    tools = ["gzip"],
    win_include = ["usr/bin/gzip.exe"],
    win_paths = ["usr/bin/gzip.exe"],
    win_tool = "msys2",
)

# GNU patch dependency
nixpkgs_package(
    name = "patch_nix",
    attribute_path = "gnupatch",
    fail_not_supported = False,
    nix_file = "//nix:bazel.nix",
    nix_file_deps = common_nix_file_deps,
    repositories = dev_env_nix_repos,
)

dev_env_tool(
    name = "patch_dev_env",
    nix_include = ["bin/patch"],
    nix_label = "@patch_nix",
    nix_paths = ["bin/patch"],
    tools = ["patch"],
    win_include = ["usr/bin/patch.exe"],
    win_paths = ["usr/bin/patch.exe"],
    win_tool = "msys2",
)

# Maven launcher; on Windows it ships the full maven distribution layout.
dev_env_tool(
    name = "mvn_dev_env",
    nix_include = ["bin/mvn"],
    nix_label = "@mvn_nix",
    nix_paths = ["bin/mvn"],
    tools = ["mvn"],
    win_include = [
        "bin",
        "boot",
        "conf",
        "lib",
    ],
    win_paths = ["bin/mvn"],
    win_tool = "maven-3.6.1",
)
# Further Nix-provided build/test tools: gawk, coreutils, grpcurl, hlint,
# zip and jekyll, all built from //nix:bazel.nix.
nixpkgs_package(
    name = "awk_nix",
    attribute_path = "gawk",
    nix_file = "//nix:bazel.nix",
    nix_file_deps = common_nix_file_deps,
    repositories = dev_env_nix_repos,
)

nixpkgs_package(
    name = "coreutils_nix",
    attribute_path = "coreutils",
    nix_file = "//nix:bazel.nix",
    nix_file_deps = common_nix_file_deps,
    repositories = dev_env_nix_repos,
)

nixpkgs_package(
    name = "grpcurl_nix",
    attribute_path = "grpcurl",
    nix_file = "//nix:bazel.nix",
    nix_file_deps = common_nix_file_deps,
    repositories = dev_env_nix_repos,
)

nixpkgs_package(
    name = "hlint_nix",
    attribute_path = "hlint",
    nix_file = "//nix:bazel.nix",
    nix_file_deps = common_nix_file_deps,
    repositories = dev_env_nix_repos,
)

nixpkgs_package(
    name = "zip_nix",
    attribute_path = "zip",
    fail_not_supported = False,
    nix_file = "//nix:bazel.nix",
    nix_file_deps = common_nix_file_deps,
    repositories = dev_env_nix_repos,
)

nixpkgs_package(
    name = "jekyll_nix",
    attribute_path = "jekyll",
    nix_file = "//nix:bazel.nix",
    nix_file_deps = common_nix_file_deps,
    repositories = dev_env_nix_repos,
)
# GHC toolchain registration helpers: bindists are used on Windows,
# nixpkgs-provided GHC everywhere else (see the calls further below).
load(
    "@rules_haskell//haskell:ghc_bindist.bzl",
    "haskell_register_ghc_bindists",
)
load(
    "@rules_haskell//haskell:nixpkgs.bzl",
    "haskell_register_ghc_nixpkgs",
)

# glibc locale archive, Linux only (glibc is not available elsewhere).
nixpkgs_package(
    name = "glibc_locales",
    attribute_path = "glibcLocales",
    build_file_content = """
package(default_visibility = ["//visibility:public"])
filegroup(
    name = "locale-archive",
    srcs = ["lib/locale/locale-archive"],
)
""",
    nix_file = "//nix:bazel.nix",
    nix_file_deps = common_nix_file_deps,
    repositories = dev_env_nix_repos,
) if is_linux else None

# This is used to get ghc-pkg on Linux.
nixpkgs_package(
    name = "ghc_nix",
    attribute_path = "ghc",
    build_file_content = """
package(default_visibility = ["//visibility:public"])
exports_files(glob(["lib/**/*"]))
""",
    nix_file = "//nix:bazel.nix",
    nix_file_deps = common_nix_file_deps,
    repositories = dev_env_nix_repos,
) if not is_windows else None

# GHC flags shared by the nixpkgs and bindist toolchain registrations below.
common_ghc_flags = [
    # We default to -c opt but we also want -O1 in -c dbg builds
    # since we use them for profiling.
    "-O1",
    "-hide-package=ghc-boot-th",
    "-hide-package=ghc-boot",
]
# Used by Darwin and Linux
haskell_register_ghc_nixpkgs(
    attribute_path = "ghcDwarf" if enable_ghc_dwarf else "ghc",
    build_file = "@io_tweag_rules_nixpkgs//nixpkgs:BUILD.pkg",
    # -fexternal-dynamic-refs is required so that we produce position-independent
    # relocations against some functions (-fPIC alone isn't sufficient).
    # -split-sections would allow us to produce significantly smaller binaries, e.g., for damlc,
    # the binary shrinks from 186MB to 83MB. -split-sections only works on Linux but
    # we get a similar behavior on Darwin by default.
    # However, we had to disable split-sections for now as it seems to interact very badly
    # with the GHCi linker to the point where :main takes several minutes rather than several seconds.
    compiler_flags = common_ghc_flags + [
        "-fexternal-dynamic-refs",
    ] + (["-g3"] if enable_ghc_dwarf else ([
        "-optl-unexported_symbols_list=*",
        "-optc-mmacosx-version-min=10.14",
        "-opta-mmacosx-version-min=10.14",
        "-optl-mmacosx-version-min=10.14",
    ] if is_darwin else ["-optl-s"])),
    compiler_flags_select = {
        "@com_github_digital_asset_daml//:profiling_build": ["-fprof-auto"],
        "//conditions:default": [],
    },
    locale_archive = "@glibc_locales//:locale-archive",
    nix_file = "//nix:bazel.nix",
    nix_file_deps = common_nix_file_deps,
    repl_ghci_args = [
        "-O0",
        "-fexternal-interpreter",
        "-Wwarn",
    ],
    repositories = dev_env_nix_repos,
    version = "9.0.2",
)

# Used by Windows
haskell_register_ghc_bindists(
    compiler_flags = common_ghc_flags,
    version = "9.0.2",
) if is_windows else None
# jq dependency (Nix on Linux/macOS, msys2 on Windows).
nixpkgs_package(
    name = "jq",
    attribute_path = "jq",
    fail_not_supported = False,
    nix_file = "//nix:bazel.nix",
    nix_file_deps = common_nix_file_deps,
    repositories = dev_env_nix_repos,
)

dev_env_tool(
    name = "jq_dev_env",
    nix_include = ["bin/jq"],
    nix_label = "@jq",
    nix_paths = ["bin/jq"],
    tools = ["jq"],
    win_include = ["mingw64/bin"],
    # Remap the msys2 layout so the tool is found under bin/ on Windows too.
    win_include_as = {"mingw64/bin": "bin"},
    win_paths = ["bin/jq.exe"],
    win_tool = "msys2",
)

# Maven from Nix; consumed by @mvn_dev_env above.
nixpkgs_package(
    name = "mvn_nix",
    attribute_path = "mvn",
    fail_not_supported = False,
    nix_file = "//nix:bazel.nix",
    nix_file_deps = common_nix_file_deps,
    repositories = dev_env_nix_repos,
)
#node & npm
nixpkgs_package(
    name = "node_nix",
    attribute_path = "nodejsNested",
    build_file_content = 'exports_files(glob(["node_nix/**"]))',
    fail_not_supported = False,
    nix_file = "//nix:bazel.nix",
    nix_file_deps = common_nix_file_deps,
    repositories = dev_env_nix_repos,
)

#sass
nixpkgs_package(
    name = "sass_nix",
    attribute_path = "sass",
    nix_file = "//nix:bazel.nix",
    nix_file_deps = common_nix_file_deps,
    repositories = dev_env_nix_repos,
)

#tex
nixpkgs_package(
    name = "texlive_nix",
    attribute_path = "texlive",
    nix_file = "//nix:bazel.nix",
    nix_file_deps = common_nix_file_deps,
    repositories = dev_env_nix_repos,
)

#sphinx
nixpkgs_package(
    name = "sphinx_nix",
    attribute_path = "sphinx-exts",
    nix_file = "//nix:bazel.nix",
    nix_file_deps = common_nix_file_deps,
    repositories = dev_env_nix_repos,
)

#Imagemagick
nixpkgs_package(
    name = "imagemagick_nix",
    attribute_path = "imagemagick",
    nix_file = "//nix:bazel.nix",
    nix_file_deps = common_nix_file_deps,
    repositories = dev_env_nix_repos,
)
# This only makes sense on Windows so we just put dummy values in the nix fields.
dev_env_tool(
    name = "makensis_dev_env",
    nix_include = [""],
    nix_paths = ["bin/makensis.exe"],
    tools = ["makensis"],
    win_include = [
        "bin",
        "contrib",
        "include",
        "plugins",
        "stubs",
    ],
    win_paths = ["bin/makensis.exe"],
    win_tool = "nsis-3.04",
) if is_windows else None

# Scaladoc
nixpkgs_package(
    name = "scala_nix",
    attribute_path = "scala_{}".format(scala_major_version_suffix),
    nix_file = "//nix:bazel.nix",
    nix_file_deps = common_nix_file_deps,
    repositories = dev_env_nix_repos,
)

# Dummy target //external:python_headers.
# To avoid query errors due to com_google_protobuf.
# See https://github.com/protocolbuffers/protobuf/blob/d9ccd0c0e6bbda9bf4476088eeb46b02d7dcd327/util/python/BUILD
bind(
    name = "python_headers",
    actual = "@com_google_protobuf//util/python:python_headers",
)

# d3plus assets used by the documentation/visualization tooling.
http_archive(
    name = "static_asset_d3plus",
    build_file_content = 'exports_files(["js/d3.min.js", "js/d3plus.min.js"])',
    sha256 = "7d31a500a4850364a966ac938eea7f2fa5ce1334966b52729079490636e7049a",
    strip_prefix = "d3plus.v1.9.8",
    type = "zip",
    urls = ["https://github.com/alexandersimoes/d3plus/releases/download/v1.9.8/d3plus.zip"],
)

load("//:bazel-haskell-deps.bzl", "daml_haskell_deps")

daml_haskell_deps()

load("@rules_haskell//tools:repositories.bzl", "rules_haskell_worker_dependencies")

# We don't use the worker mode, but this is required for bazel query to function.
# Call this after `daml_haskell_deps` to ensure that the right `stack` is used.
rules_haskell_worker_dependencies()
# Java runtime: dadew-provided OpenJDK on Windows, nixpkgs JDK 11 elsewhere.
load("//bazel_tools:java.bzl", "dadew_java_configure", "nixpkgs_java_configure")

dadew_java_configure(
    name = "dadew_java_runtime",
    dadew_path = "ojdkbuild11",
) if is_windows else None

nixpkgs_java_configure(
    attribute_path = "jdk11.home",
    nix_file = "//nix:bazel.nix",
    nix_file_deps = common_nix_file_deps,
    repositories = dev_env_nix_repos,
) if not is_windows else None
# rules_go used here to compile a wrapper around the protoc-gen-scala plugin
load("@io_bazel_rules_go//go:deps.bzl", "go_register_toolchains", "go_rules_dependencies")

nixpkgs_package(
    name = "go_nix",
    attribute_path = "go",
    build_file_content = """
filegroup(
    name = "sdk",
    srcs = glob(["share/go/**"]),
    visibility = ["//visibility:public"],
)
""",
    nix_file = "//nix:bazel.nix",
    nix_file_deps = common_nix_file_deps,
    repositories = dev_env_nix_repos,
)

# A repository that generates the Go SDK imports, see
# ./bazel_tools/go_sdk/README.md
local_repository(
    name = "go_sdk_repo",
    path = "bazel_tools/go_sdk",
)

load("@io_bazel_rules_go//go:deps.bzl", "go_wrap_sdk")

# On Nix platforms we use the Nix provided Go SDK, on Windows we let Bazel pull
# an upstream one.
go_wrap_sdk(
    name = "go_sdk",
    root_file = "@go_nix//:share/go/ROOT",
) if not is_windows else None
# Gazelle manages the Go dependencies declared in //:go_deps.bzl.
# The `gazelle:` comments below are directives read by Gazelle itself;
# do not edit or move them.
# gazelle:repo bazel_gazelle
load("@bazel_gazelle//:deps.bzl", "gazelle_dependencies")
load("//:go_deps.bzl", "go_deps")

# gazelle:repository_macro go_deps.bzl%go_deps
go_deps()

go_rules_dependencies()

# On Windows we let rules_go download an upstream Go toolchain instead of
# using the Nix-provided SDK registered above.
go_register_toolchains() if not is_windows else go_register_toolchains(version = "1.16.9")

gazelle_dependencies()
load("@go_googleapis//:repository_rules.bzl", "switched_rules_by_language")

# Only generate gRPC/Java rules from the googleapis protos.
switched_rules_by_language(
    name = "com_google_googleapis_imports",
    grpc = True,
    java = True,
)

# Java (Maven) dependencies, pinned via maven_install.json.
load("//:bazel-java-deps.bzl", "install_java_deps")

install_java_deps()

load("@maven//:defs.bzl", "pinned_maven_install")

pinned_maven_install()

# Scala toolchain; `scala_version` and `scala_artifacts` are defined earlier
# in this WORKSPACE.
load("@io_bazel_rules_scala//:scala_config.bzl", "scala_config")

scala_config(scala_version)

load("@io_bazel_rules_scala//scala:scala.bzl", "scala_repositories")

scala_repositories(
    fetch_sources = True,
    overriden_artifacts = scala_artifacts,
)
load("@io_bazel_rules_scala//scala:toolchains.bzl", "scala_register_toolchains")

# Register our own Scala toolchain instead of the rules_scala default.
register_toolchains("//bazel_tools/scala:toolchain")

load("@io_bazel_rules_scala//testing:scalatest.bzl", "scalatest_repositories", "scalatest_toolchain")

scalatest_repositories()

scalatest_toolchain()
# ScalaPB protobuf definitions, fetched at the version pinned in
# //bazel_tools:scalapb.bzl.
load("//bazel_tools:scalapb.bzl", "scalapb_version")

http_archive(
    name = "scalapb",
    build_file_content = """
proto_library(
    name = "scalapb_proto",
    srcs = ["protobuf/scalapb/scalapb.proto"],
    strip_import_prefix = "protobuf/",
    deps = [
        "@com_google_protobuf//:descriptor_proto",
    ],
    visibility = ["//visibility:public"],
)
""",
    sha256 = "2ddce4c5927fa8dd80069fba2fb60199f5b2b95e81e8da69b132665fae6c638c",
    strip_prefix = "ScalaPB-{}".format(scalapb_version),
    urls = ["https://github.com/scalapb/ScalaPB/archive/refs/tags/v{}.tar.gz".format(scalapb_version)],
)

load("@io_bazel_rules_scala//jmh:jmh.bzl", "jmh_repositories")

jmh_repositories()
# TODO (aherrmann) This wrapper is only used on Windows.
# Replace by an appropriate Windows only `dadew_tool` call.
dev_env_tool(
    name = "nodejs_dev_env",
    nix_include = [
        "bin",
        "include",
        "lib",
        "share",
    ],
    nix_label = "@node_nix",
    nix_paths = [],
    prefix = "nodejs_dev_env",
    tools = [],
    win_include = [
        ".",
    ],
    win_paths = [],
    win_tool = "nodejs",
)

# Setup the Node.js toolchain
load("@build_bazel_rules_nodejs//:index.bzl", "node_repositories", "yarn_install")

node_repositories(
    package_json = ["//:package.json"],
    # Using `dev_env_tool` introduces an additional layer of symlink
    # indirection. Bazel doesn't track dependencies through symbolic links.
    # Occasionally, this can cause build failures on CI if a build is not
    # invalidated despite a change of an original source. To avoid such issues
    # we use the `nixpkgs_package` directly.
    vendored_node = "@nodejs_dev_env" if is_windows else "@node_nix",
)

# Root-level npm packages; --frozen-lockfile keeps installs reproducible.
yarn_install(
    name = "npm",
    args = ["--frozen-lockfile"],
    package_json = "//:package.json",
    symlink_node_modules = False,
    yarn_lock = "//:yarn.lock",
)

# TODO use fine-grained managed dependency
yarn_install(
    name = "daml_extension_deps",
    args = ["--frozen-lockfile"],
    package_json = "//compiler/daml-extension:package.json",
    symlink_node_modules = False,
    yarn_lock = "//compiler/daml-extension:yarn.lock",
)

# TODO use fine-grained managed dependency
yarn_install(
    name = "navigator_frontend_deps",
    args = ["--frozen-lockfile"],
    package_json = "//navigator/frontend:package.json",
    symlink_node_modules = False,
    yarn_lock = "//navigator/frontend:yarn.lock",
)
# We've had a bunch of problems with typescript rules on Windows.
# Therefore we've disabled them completely for now.
# Since we need to @load stuff in @language_support_ts_deps
# and load statements can't be conditional, we create a dummy
# workspace on Windows.
# See #4162 for more details.
yarn_install(
    name = "language_support_ts_deps",
    args = ["--frozen-lockfile"],
    package_json = "//language-support/ts/packages:package.json",
    symlink_node_modules = False,
    yarn_lock = "//language-support/ts/packages:yarn.lock",
) if not is_windows else create_workspace(
    name = "language_support_ts_deps",
    # No-op stand-ins for the macros that would otherwise be loaded from the
    # real workspace, so Windows builds can still evaluate the load statements.
    files = {
        "eslint/BUILD.bazel": 'exports_files(["index.bzl"])',
        "eslint/index.bzl": "def eslint_test(*args, **kwargs):\n    pass",
        "jest-cli/BUILD.bazel": 'exports_files(["index.bzl"])',
        "jest-cli/index.bzl": "def jest_test(*args, **kwargs):\n    pass",
        "@bazel/typescript/BUILD.bazel": 'exports_files(["index.bzl"])',
        "@bazel/typescript/index.bzl": "def ts_project(*args, **kwargs):\n    pass",
    },
)

# We usually use the _deploy_jar target to produce self-contained jars, but here we're using jar_jar because the size
# of codegen tool is substantially reduced (as shown below) and that the presence of JVM internal com.sun classes could
# theoretically stop the codegen running against JVMs other the OpenJDK 8 (the current JVM used for building).
load("@com_github_johnynek_bazel_jar_jar//:jar_jar.bzl", "jar_jar_repositories")

jar_jar_repositories()
# The following is advertised by rules_proto, but we define our own dependencies
# in dependencies.yaml. So all we need to do is replicate the binds here
# https://github.com/stackb/rules_proto/tree/master/java#java_grpc_library
# load("@io_grpc_grpc_java//:repositories.bzl", "grpc_java_repositories")
# grpc_java_repositories()

# Load the grpc deps last, since it won't try to load already loaded
# dependencies.
load("@com_github_grpc_grpc//bazel:grpc_deps.bzl", "grpc_deps")

grpc_deps()
# Set up @upb's own workspace dependencies (upb is presumably brought in
# by grpc_deps() above — confirm against grpc_deps.bzl if this moves).
load("@upb//bazel:workspace_deps.bzl", "upb_deps")
upb_deps()
# Set up @build_bazel_rules_apple's own workspace dependencies.
load("@build_bazel_rules_apple//apple:repositories.bzl", "apple_rules_dependencies")
apple_rules_dependencies()
# Set up dependencies for buildifier (the BUILD/WORKSPACE file formatter
# from bazelbuild/buildtools).
load("@com_github_bazelbuild_buildtools//buildifier:deps.bzl", "buildifier_dependencies")
buildifier_dependencies()
# PostgreSQL 10 pulled from nixpkgs (//nix:bazel.nix).
# fail_not_supported = False turns this into a no-op instead of a hard
# error on platforms where nix is unavailable (e.g. Windows) — see the
# rules_nixpkgs documentation for the exact semantics.
nixpkgs_package(
    name = "postgresql_nix",
    attribute_path = "postgresql_10",
    fail_not_supported = False,
    nix_file = "//nix:bazel.nix",
    nix_file_deps = common_nix_file_deps,
    repositories = dev_env_nix_repos,
)
# Cross-platform PostgreSQL toolchain wrapper (dev_env_tool is a
# project-local rule): on nix platforms it re-exports @postgresql_nix,
# on Windows it takes the binaries from the msys2 mingw64 tree instead.
dev_env_tool(
    name = "postgresql_dev_env",
    # Directories taken from the nix package.
    nix_include = [
        "bin",
        "include",
        "lib",
        "share",
    ],
    nix_label = "@postgresql_nix",
    # Executables exposed from the nix package.
    nix_paths = [
        "bin/createdb",
        "bin/dropdb",
        "bin/initdb",
        "bin/pg_ctl",
        "bin/postgres",
    ],
    # Tools that require other tools on the PATH to function:
    # initdb and pg_ctl both invoke the postgres server binary.
    required_tools = {
        "initdb": ["postgres"],
        "pg_ctl": ["postgres"],
    },
    tools = [
        "createdb",
        "dropdb",
        "initdb",
        "pg_ctl",
        "postgres",
    ],
    # On Windows the same layout lives under msys2's mingw64 prefix;
    # win_include_as strips that prefix so both platforms expose
    # identical bin/include/lib/share paths.
    win_include = [
        "mingw64/bin",
        "mingw64/include",
        "mingw64/lib",
        "mingw64/share",
    ],
    win_include_as = {
        "mingw64/bin": "bin",
        "mingw64/include": "include",
        "mingw64/lib": "lib",
        "mingw64/share": "share",
    },
    win_paths = [
        "bin/createdb.exe",
        "bin/dropdb.exe",
        "bin/initdb.exe",
        "bin/pg_ctl.exe",
        "bin/postgres.exe",
    ],
    win_tool = "msys2",
)
# buf (protobuf linting/breaking-change tooling) from nixpkgs.
# fail_not_supported = False: silently skipped on platforms without nix.
nixpkgs_package(
    name = "buf",
    attribute_path = "buf",
    fail_not_supported = False,
    nix_file = "//nix:bazel.nix",
    nix_file_deps = common_nix_file_deps,
    repositories = dev_env_nix_repos,
)
# The `script` attribute from nixpkgs (//nix:bazel.nix). Unlike the
# packages above this has no fail_not_supported escape hatch, so the
# whole definition is skipped on Windows via the trailing conditional.
nixpkgs_package(
    name = "script_nix",
    attribute_path = "script",
    nix_file = "//nix:bazel.nix",
    nix_file_deps = common_nix_file_deps,
    repositories = dev_env_nix_repos,
) if not is_windows else None