# Copyright (c) 2021 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
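#
# Rules and macros for compiling *.proto files: proto_gen drives protoc with an
# optional code-generator plugin and packages the output as a srcjar, while
# proto_jars additionally builds the Java and Scala bindings plus the artifacts
# needed for Maven publishing.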
load("//bazel_tools:java.bzl", "da_java_library")
load("//bazel_tools:javadoc_library.bzl", "javadoc_library")
load("//bazel_tools:pkg.bzl", "pkg_empty_zip")
load("//bazel_tools:pom_file.bzl", "pom_file")
load("//bazel_tools:scala.bzl", "scala_source_jar", "scaladoc_jar")
load("@io_bazel_rules_scala//scala:scala.bzl", "scala_library")
load("@os_info//:os_info.bzl", "is_windows")
load("@rules_pkg//:pkg.bzl", "pkg_tar")
load("@rules_proto//proto:defs.bzl", "proto_library")
load("@scala_version//:index.bzl", "scala_major_version_suffix")
# taken from rules_proto:
# https://github.com/stackb/rules_proto/blob/f5d6eea6a4528bef3c1d3a44d486b51a214d61c2/compile.bzl#L369-L393
def get_plugin_runfiles(tool, plugin_runfiles):
"""Gather runfiles for a plugin.
"""
files = []
if not tool:
return files
info = tool[DefaultInfo]
if not info:
return files
if info.files:
files += info.files.to_list()
if info.default_runfiles:
runfiles = info.default_runfiles
if runfiles.files:
files += runfiles.files.to_list()
if info.data_runfiles:
runfiles = info.data_runfiles
if runfiles.files:
files += runfiles.files.to_list()
if plugin_runfiles:
for target in plugin_runfiles:
files += target.files.to_list()
return files
def _proto_gen_impl(ctx):
sources_out = ctx.actions.declare_directory(ctx.attr.name + "-sources")
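    # protoc is driven from the transitive descriptor sets of the proto_library
    # sources rather than from the raw *.proto files, so only the descriptors
    # need to be staged as action inputs. The descriptor set path separator
    # differs between Windows and POSIX.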
descriptor_set_delim = "\\;" if _is_windows(ctx) else ":"
descriptors = [depset for src in ctx.attr.srcs for depset in src[ProtoInfo].transitive_descriptor_sets.to_list()]
args = [
"--descriptor_set_in=" + descriptor_set_delim.join([depset.path for depset in descriptors]),
"--{}_out={}:{}".format(ctx.attr.plugin_name, ",".join(ctx.attr.plugin_options), sources_out.path),
]
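    # The java and python generators are built into protoc itself; every other
    # language needs an external protoc plugin, which is registered below via
    # --plugin=protoc-gen-<name>=<path>.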
plugins = []
plugin_runfiles = []
if ctx.attr.plugin_name not in ["java", "python"]:
plugins = [ctx.executable.plugin_exec]
plugin_runfiles = get_plugin_runfiles(ctx.attr.plugin_exec, ctx.attr.plugin_runfiles)
args += [
"--plugin=protoc-gen-{}={}".format(ctx.attr.plugin_name, ctx.executable.plugin_exec.path),
]
inputs = []
for src in ctx.attr.srcs:
src_root = src[ProtoInfo].proto_source_root
for direct_source in src[ProtoInfo].direct_sources:
path = ""
            # In some cases the paths of src_root and direct_source only partially
            # overlap. The following loop finds the maximum overlap of the two
            # paths and strips it from the source path.
for i in range(len(src_root) + 1):
if direct_source.path.startswith(src_root[-i:]):
path = direct_source.path[i:]
else:
                    # The else branch cannot be empty, so perform a no-op assignment.
noop = ""
path = direct_source.short_path if not path else path
path = path[1:] if path.startswith("/") else path
inputs += [path]
args += inputs
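    # The POSIX toolchain from rules_sh resolves standard Unix commands (mkdir,
    # find, sed, sort) to concrete paths, so the shell actions below do not
    # rely on the tools being on PATH.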
posix = ctx.toolchains["@rules_sh//sh/posix:toolchain_type"]
ctx.actions.run_shell(
mnemonic = "ProtoGen",
outputs = [sources_out],
inputs = descriptors + [ctx.executable.protoc] + plugin_runfiles,
command = posix.commands["mkdir"] + " -p " + sources_out.path + " && " + ctx.executable.protoc.path + " " + " ".join(args),
tools = plugins,
)
    # Since we only have the output directory of the protoc compilation, find
    # all files below sources_out and add them to the zipper args file.
zipper_args_file = ctx.actions.declare_file(ctx.label.name + ".zipper_args")
ctx.actions.run_shell(
mnemonic = "CreateZipperArgsFile",
outputs = [zipper_args_file],
inputs = [sources_out],
command = "{find} -L {src_path} -type f | {sed} -E 's#^{src_path}/(.*)$#\\1={src_path}/\\1#' | {sort} > {args_file}".format(
find = posix.commands["find"],
sed = posix.commands["sed"],
sort = posix.commands["sort"],
src_path = sources_out.path,
args_file = zipper_args_file.path,
),
progress_message = "zipper_args_file %s" % zipper_args_file.path,
)
# Call zipper to create srcjar
zipper_args = ctx.actions.args()
zipper_args.add("c")
zipper_args.add(ctx.outputs.out.path)
zipper_args.add("@%s" % zipper_args_file.path)
ctx.actions.run(
executable = ctx.executable._zipper,
inputs = [sources_out, zipper_args_file],
outputs = [ctx.outputs.out],
arguments = [zipper_args],
progress_message = "srcjar %s" % ctx.outputs.out.short_path,
)
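# proto_gen invokes protoc with the named code-generator plugin over the given
# proto_library targets and packages whatever the plugin emits into a single
# %{name}.srcjar output.
#
# A minimal usage sketch (the target names are hypothetical; the plugin label
# is the one used by proto_jars below):
#
#     proto_gen(
#         name = "my_api_scala_sources",
#         srcs = [":my_api_proto"],
#         plugin_exec = "//scala-protoc-plugins/scalapb:protoc-gen-scalapb",
#         plugin_name = "scalapb",
#         plugin_options = ["grpc"],
#     )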
proto_gen = rule(
implementation = _proto_gen_impl,
attrs = {
"srcs": attr.label_list(providers = [ProtoInfo]),
"plugin_name": attr.string(),
"plugin_exec": attr.label(
cfg = "host",
executable = True,
),
"plugin_options": attr.string_list(),
"plugin_runfiles": attr.label_list(
default = [],
allow_files = True,
),
"protoc": attr.label(
default = Label("@com_google_protobuf//:protoc"),
cfg = "host",
allow_files = True,
executable = True,
),
"_zipper": attr.label(
default = Label("@bazel_tools//tools/zip:zipper"),
cfg = "host",
executable = True,
allow_files = True,
),
},
outputs = {
"out": "%{name}.srcjar",
},
output_to_genfiles = True,
toolchains = ["@rules_sh//sh/posix:toolchain_type"],
)
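# Detect the host platform via the path separator: ";" on Windows, ":" elsewhere.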
def _is_windows(ctx):
return ctx.configuration.host_path_separator == ";"
def _maven_tags(group, artifact_prefix, artifact_suffix):
if group and artifact_prefix:
artifact = artifact_prefix + "-" + artifact_suffix
return ["maven_coordinates=%s:%s:__VERSION__" % (group, artifact)]
else:
return []
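# For example (hypothetical coordinates):
#     _maven_tags("com.example", "ledger-api", "proto")
#     == ["maven_coordinates=com.example:ledger-api-proto:__VERSION__"]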
def _proto_scala_srcs(name, grpc):
return [
":%s" % name,
"//bazel_tools/scalapb:scalapb-configuration",
] + ([
"@com_github_googleapis_googleapis//google/rpc:code_proto",
"@com_github_googleapis_googleapis//google/rpc:error_details_proto",
"@com_github_googleapis_googleapis//google/rpc:status_proto",
"@com_github_grpc_grpc//src/proto/grpc/health/v1:health_proto_descriptor",
] if grpc else [])
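# Compile-time dependencies for the generated ScalaPB sources. With grpc = True
# the generated service stubs additionally need the grpc-java API and the
# ScalaPB gRPC runtime; the Scala bindings of any proto_deps are appended so
# that cross-package proto imports resolve.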
def _proto_scala_deps(grpc, proto_deps):
return [
"@maven//:com_google_protobuf_protobuf_java",
"@maven//:com_thesamet_scalapb_lenses_{}".format(scala_major_version_suffix),
"@maven//:com_thesamet_scalapb_scalapb_runtime_{}".format(scala_major_version_suffix),
] + ([
"@maven//:com_thesamet_scalapb_scalapb_runtime_grpc_{}".format(scala_major_version_suffix),
"@maven//:io_grpc_grpc_api",
"@maven//:io_grpc_grpc_core",
"@maven//:io_grpc_grpc_protobuf",
"@maven//:io_grpc_grpc_stub",
] if grpc else []) + [
"%s_scala" % label
for label in proto_deps
]
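# proto_jars bundles everything a package shipping protobuf definitions
# typically needs: a tarball of the *.proto sources, a resource JAR, the
# proto_library itself, generated Java and Scala bindings, and the POM,
# javadoc/scaladoc, and source artifacts required for Maven publishing.
#
# A minimal usage sketch (names and Maven coordinates are hypothetical):
#
#     proto_jars(
#         name = "my_api_proto",
#         srcs = glob(["**/*.proto"]),
#         grpc = True,
#         maven_group = "com.example",
#         maven_artifact_prefix = "my-api",
#         javadoc_root_packages = ["com.example.my.api"],
#         visibility = ["//visibility:public"],
#     )
#
# Among the targets this defines are :my_api_proto (the proto_library),
# :my_api_proto_java, and :my_api_proto_scala.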
def proto_jars(
name,
srcs,
visibility = None,
strip_import_prefix = "",
grpc = False,
deps = [],
proto_deps = [],
java_deps = [],
scala_deps = [],
javadoc_root_packages = [],
maven_group = None,
maven_artifact_prefix = None,
maven_artifact_proto_suffix = "proto",
maven_artifact_java_suffix = "java-proto",
maven_artifact_scala_suffix = "scala-proto"):
# NOTE (MK) An empty string flattens the whole structure which is
# rarely what you want, see https://github.com/bazelbuild/rules_pkg/issues/82
tar_strip_prefix = "." if not strip_import_prefix else strip_import_prefix
# Tarball containing the *.proto files.
pkg_tar(
name = "%s_tar" % name,
srcs = srcs,
extension = "tar.gz",
strip_prefix = tar_strip_prefix,
visibility = [":__subpackages__", "//release:__subpackages__"],
)
# JAR and source JAR containing the *.proto files.
da_java_library(
name = "%s_jar" % name,
srcs = None,
deps = None,
runtime_deps = ["%s_jar" % label for label in proto_deps],
resources = srcs,
resource_strip_prefix = "%s/%s/" % (native.package_name(), strip_import_prefix),
tags = _maven_tags(maven_group, maven_artifact_prefix, maven_artifact_proto_suffix),
visibility = visibility,
)
# An empty Javadoc JAR for uploading the source proto JAR to Maven Central.
pkg_empty_zip(
name = "%s_jar_javadoc" % name,
out = "%s_jar_javadoc.jar" % name,
)
# Compiled protobufs.
proto_library(
name = name,
srcs = srcs,
strip_import_prefix = strip_import_prefix,
visibility = visibility,
deps = deps + proto_deps,
)
# JAR and source JAR containing the generated Java bindings.
native.java_proto_library(
name = "%s_java" % name,
tags = _maven_tags(maven_group, maven_artifact_prefix, maven_artifact_java_suffix),
visibility = visibility,
deps = [":%s" % name],
)
if maven_group and maven_artifact_prefix:
pom_file(
name = "%s_java_pom" % name,
tags = _maven_tags(maven_group, maven_artifact_prefix, maven_artifact_java_suffix),
target = ":%s_java" % name,
visibility = visibility,
)
if javadoc_root_packages:
javadoc_library(
name = "%s_java_javadoc" % name,
srcs = [":%s_java" % name],
root_packages = javadoc_root_packages,
deps = ["@maven//:com_google_protobuf_protobuf_java"],
) if not is_windows else None
else:
# An empty Javadoc JAR for uploading the compiled proto JAR to Maven Central.
pkg_empty_zip(
name = "%s_java_javadoc" % name,
out = "%s_java_javadoc.jar" % name,
)
# JAR containing the generated Scala bindings.
proto_gen(
name = "%s_scala_sources" % name,
srcs = _proto_scala_srcs(name, grpc),
plugin_exec = "//scala-protoc-plugins/scalapb:protoc-gen-scalapb",
plugin_name = "scalapb",
plugin_options = ["grpc"] if grpc else [],
)
all_scala_deps = _proto_scala_deps(grpc, proto_deps)
scala_library(
name = "%s_scala" % name,
srcs = [":%s_scala_sources" % name],
tags = _maven_tags(maven_group, maven_artifact_prefix, maven_artifact_scala_suffix),
unused_dependency_checker_mode = "error",
visibility = visibility,
deps = all_scala_deps,
exports = all_scala_deps,
)
scala_source_jar(
name = "%s_scala_src" % name,
srcs = [":%s_scala_sources" % name],
)
scaladoc_jar(
name = "%s_scala_scaladoc" % name,
srcs = [":%s_scala_sources" % name],
tags = ["scaladoc"],
deps = [],
    ) if not is_windows else None
if maven_group and maven_artifact_prefix:
pom_file(
name = "%s_scala_pom" % name,
target = ":%s_scala" % name,
visibility = visibility,
)