Remove all instances of use_default_shell_env = True (#3597)

* use_default_shell_env = False in proto_gen

* use_default_shell_env = False in scala_source_jar

* use_default_shell_env = False in scaladoc_jar

* use_default_shell_env = False in dar_to_scala

* use_default_shell_env = False in _real_pkg_tar

* use_default_shell_env = False in client_server_build

* use_default_shell_env = False in npm_package
Andreas Herrmann 2019-11-25 10:15:00 +01:00 committed by mergify[bot]
parent 1bd2ec5b1a
commit 10031a614c
8 changed files with 66 additions and 42 deletions
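
The same change is applied in every rule this commit touches: instead of relying on use_default_shell_env = True to pick shell tools up from the ambient environment, each rule declares the @rules_sh//sh/posix:toolchain_type toolchain and resolves every command (cat, find, mkdir, sed, sort) through posix.commands. A minimal sketch of the pattern, using a hypothetical example rule rather than any rule from this commit:

    def _example_impl(ctx):
        # Resolve the POSIX toolchain instead of inheriting the host shell environment.
        posix = ctx.toolchains["@rules_sh//sh/posix:toolchain_type"]
        out = ctx.actions.declare_file(ctx.label.name + ".txt")
        ctx.actions.run_shell(
            outputs = [out],
            inputs = ctx.files.srcs,
            # Tool paths are spelled out explicitly; no use_default_shell_env = True.
            command = "{find} -L . -type f | {sort} > {out}".format(
                find = posix.commands["find"],
                sort = posix.commands["sort"],
                out = out.path,
            ),
        )
        return [DefaultInfo(files = depset([out]))]

    example = rule(
        implementation = _example_impl,
        attrs = {"srcs": attr.label_list(allow_files = True)},
        toolchains = ["@rules_sh//sh/posix:toolchain_type"],
    )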

View File

@@ -2,6 +2,7 @@
# SPDX-License-Identifier: Apache-2.0
def _client_server_build_impl(ctx):
+ posix = ctx.toolchains["@rules_sh//sh/posix:toolchain_type"]
ctx.actions.run_shell(
outputs = [ctx.outputs.out],
inputs = ctx.files.data,
@@ -10,15 +11,15 @@ def _client_server_build_impl(ctx):
ctx.executable.client,
ctx.executable.server,
]),
- use_default_shell_env = True,
command = """
export {output_env}="{output_path}"
{runner} "{client}" "{client_args} {client_files}" "{server}" "{server_args} {server_files}" &> runner.log
if [ "$?" -ne 0 ]; then
- cat runner.log
+ {cat} runner.log
exit 1
fi
""".format(
+ cat = posix.commands["cat"],
output_env = ctx.attr.output_env,
output_path = ctx.outputs.out.path,
runner = ctx.executable._runner.path,
@@ -63,6 +64,7 @@ client_server_build = rule(
outputs = {
"out": "%{name}.out",
},
+ toolchains = ["@rules_sh//sh/posix:toolchain_type"],
)
"""Creates a build target for a client-server run.

View File

@@ -36,7 +36,7 @@ def _remap(remap_paths, path):
return path
def _quote(filename, protect = "="):
- """Quote the filename, by escaping = by \= and \ by \\"""
+ """Quote the filename, by escaping = by \= and \ by \\ """
return filename.replace("\\", "\\\\").replace(protect, "\\" + protect)
def _pkg_tar_impl(ctx):
@@ -121,7 +121,6 @@ def _pkg_tar_impl(ctx):
arguments = ["--flagfile", arg_file.path],
outputs = [ctx.outputs.out],
mnemonic = "PackageTar",
- use_default_shell_env = True,
)
def _pkg_deb_impl(ctx):

View File

@@ -83,13 +83,13 @@ def _proto_gen_impl(ctx):
args += inputs
+ posix = ctx.toolchains["@rules_sh//sh/posix:toolchain_type"]
ctx.actions.run_shell(
mnemonic = "ProtoGen",
outputs = [sources_out],
inputs = descriptors + [ctx.executable.protoc] + plugin_runfiles,
- command = "mkdir -p " + sources_out.path + " && " + ctx.executable.protoc.path + " " + " ".join(args),
+ command = posix.commands["mkdir"] + " -p " + sources_out.path + " && " + ctx.executable.protoc.path + " " + " ".join(args),
tools = plugins,
- use_default_shell_env = True,
)
# since we only have the output directory of the protoc compilation,
@@ -99,12 +99,14 @@ def _proto_gen_impl(ctx):
mnemonic = "CreateZipperArgsFile",
outputs = [zipper_args_file],
inputs = [sources_out],
- command = "find -L {src_path} -type f | sed -E 's#^{src_path}/(.*)$#\\1={src_path}/\\1#' | sort > {args_file}".format(
+ command = "{find} -L {src_path} -type f | {sed} -E 's#^{src_path}/(.*)$#\\1={src_path}/\\1#' | {sort} > {args_file}".format(
+ find = posix.commands["find"],
+ sed = posix.commands["sed"],
+ sort = posix.commands["sort"],
src_path = sources_out.path,
args_file = zipper_args_file.path,
),
progress_message = "zipper_args_file %s" % zipper_args_file.path,
- use_default_shell_env = True,
)
# Call zipper to create srcjar
@@ -152,6 +154,7 @@ proto_gen = rule(
"out": "%{name}.srcjar",
},
output_to_genfiles = True,
+ toolchains = ["@rules_sh//sh/posix:toolchain_type"],
)
def _is_windows(ctx):
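
For reference, the find/sed/sort pipeline introduced above (and reused in scala_source_jar, scaladoc_jar, and dar_to_scala below) writes the zipper argument file: one path_in_archive=path_on_disk mapping per generated file, sorted for deterministic output. A sketch of the same construction as a standalone helper, with hypothetical parameter names:

    def _zipper_args_command(posix, src_path, args_file):
        # Expands to roughly:
        #   find -L <src_path> -type f | sed -E 's#^<src_path>/(.*)$#\1=<src_path>/\1#' | sort > <args_file>
        # and the resulting file contains lines such as
        #   com/example/Foo.scala=<src_path>/com/example/Foo.scala
        return "{find} -L {src_path} -type f | {sed} -E 's#^{src_path}/(.*)$#\\1={src_path}/\\1#' | {sort} > {args_file}".format(
            find = posix.commands["find"],
            sed = posix.commands["sed"],
            sort = posix.commands["sort"],
            src_path = src_path,
            args_file = args_file,
        )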

View File

@@ -1,24 +0,0 @@
From 6b26f94f159b1c722348200b09ea7f3e0e9f9e13 Mon Sep 17 00:00:00 2001
From: Florian Klink <flokli@flokli.de>
Date: Tue, 26 Mar 2019 18:55:18 +0100
Subject: [PATCH] Set use_default_shell_env
---
internal/npm_package/npm_package.bzl | 1 +
1 file changed, 1 insertion(+)
diff --git a/internal/npm_package/npm_package.bzl b/internal/npm_package/npm_package.bzl
index 3e20fd0..df5974a 100644
--- a/internal/npm_package/npm_package.bzl
+++ b/internal/npm_package/npm_package.bzl
@@ -70,6 +70,7 @@ def create_package(ctx, deps_sources, nested_packages):
inputs = inputs,
outputs = [package_dir, ctx.outputs.pack, ctx.outputs.publish],
arguments = [args],
+ use_default_shell_env = True,
execution_requirements = {
# Never schedule this action remotely because it's not computationally expensive.
# It just copies files into a directory; it's not worth copying inputs and outputs to a remote worker.
--
2.19.2

View File

@@ -0,0 +1,30 @@
diff --git a/internal/npm_package/npm_package.bzl b/internal/npm_package/npm_package.bzl
index 7b19296b..af1ed3b0 100644
--- a/internal/npm_package/npm_package.bzl
+++ b/internal/npm_package/npm_package.bzl
@@ -66,6 +66,7 @@ def create_package(ctx, deps_sources, nested_packages):
if ctx.version_file:
inputs.append(ctx.version_file)
+ posix = ctx.toolchains["@rules_sh//sh/posix:toolchain_type"]
ctx.actions.run(
progress_message = "Assembling npm package %s" % package_dir.short_path,
executable = ctx.executable._packager,
@@ -80,6 +81,9 @@ def create_package(ctx, deps_sources, nested_packages):
# See https://github.com/bazelbuild/rules_nodejs/issues/187
"local": "1",
},
+ env = {
+ "PATH": ctx.host_configuration.host_path_separator.join(posix.paths),
+ },
)
return package_dir
@@ -168,6 +172,7 @@ npm_package = rule(
implementation = _npm_package,
attrs = NPM_PACKAGE_ATTRS,
outputs = NPM_PACKAGE_OUTPUTS,
+ toolchains = ["@rules_sh//sh/posix:toolchain_type"],
)
"""The npm_package rule creates a directory containing a publishable npm artifact.

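Where the action is a plain ctx.actions.run rather than run_shell (as in the patched npm_package rule above), there is no command string to interpolate tool paths into, so the new patch instead hands the tool a PATH assembled from the toolchain's posix.paths. A sketch of that idea with a hypothetical packager rule (the _packager label below is illustrative, not taken from rules_nodejs):

    def _pack_impl(ctx):
        posix = ctx.toolchains["@rules_sh//sh/posix:toolchain_type"]
        out = ctx.actions.declare_directory(ctx.label.name + "_pkg")
        ctx.actions.run(
            executable = ctx.executable._packager,
            arguments = [out.path],
            inputs = ctx.files.srcs,
            outputs = [out],
            # Build PATH from the POSIX toolchain instead of the default shell environment.
            env = {
                "PATH": ctx.host_configuration.host_path_separator.join(posix.paths),
            },
        )
        return [DefaultInfo(files = depset([out]))]

    pack = rule(
        implementation = _pack_impl,
        attrs = {
            "srcs": attr.label_list(allow_files = True),
            # Hypothetical packager binary standing in for rules_nodejs' _packager.
            "_packager": attr.label(executable = True, cfg = "host", default = "//examples:packager"),
        },
        toolchains = ["@rules_sh//sh/posix:toolchain_type"],
    )
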
View File

@@ -171,22 +171,29 @@ def _scala_source_jar_impl(ctx):
for new_path in _strip_path_upto(src.path, ctx.attr.strip_upto):
zipper_args.append("%s=%s" % (new_path, src.path))
+ posix = ctx.toolchains["@rules_sh//sh/posix:toolchain_type"]
if len(tmpsrcdirs) > 0:
tmpsrc_cmds = [
- "(find -L {tmpsrc_path} -type f | sed -E 's#^{tmpsrc_path}/(.*)$#\\1={tmpsrc_path}/\\1#')".format(tmpsrc_path = tmpsrcdir.path)
+ "({find} -L {tmpsrc_path} -type f | {sed} -E 's#^{tmpsrc_path}/(.*)$#\\1={tmpsrc_path}/\\1#')".format(
+ find = posix.commands["find"],
+ sed = posix.commands["sed"],
+ tmpsrc_path = tmpsrcdir.path,
+ )
for tmpsrcdir in tmpsrcdirs
]
- cmd = "(echo -e \"{src_paths}\" && {joined_tmpsrc_cmds}) | sort > {args_file}".format(
+ cmd = "(echo -e \"{src_paths}\" && {joined_tmpsrc_cmds}) | {sort} > {args_file}".format(
src_paths = "\\n".join(zipper_args),
joined_tmpsrc_cmds = " && ".join(tmpsrc_cmds),
args_file = zipper_args_file.path,
+ sort = posix.commands["sort"],
)
inputs = tmpsrcdirs + [manifest_file] + ctx.files.srcs
else:
- cmd = "echo -e \"{src_paths}\" | sort > {args_file}".format(
+ cmd = "echo -e \"{src_paths}\" | {sort} > {args_file}".format(
src_paths = "\\n".join(zipper_args),
args_file = zipper_args_file.path,
+ sort = posix.commands["sort"],
)
inputs = [manifest_file] + ctx.files.srcs
@@ -196,7 +203,6 @@ def _scala_source_jar_impl(ctx):
inputs = inputs,
command = cmd,
progress_message = "find_scala_source_files %s" % zipper_args_file.path,
- use_default_shell_env = True,
)
ctx.actions.run(
@@ -229,6 +235,7 @@ scala_source_jar = rule(
outputs = {
"out": "%{name}.jar",
},
+ toolchains = ["@rules_sh//sh/posix:toolchain_type"],
)
def _create_scala_source_jar(**kwargs):
@@ -322,16 +329,19 @@ def _scaladoc_jar_impl(ctx):
# since we only have the output directory of the scaladoc generation we need to find
# all the files below sources_out and add them to the zipper args file
zipper_args_file = ctx.actions.declare_file(ctx.label.name + ".zipper_args")
+ posix = ctx.toolchains["@rules_sh//sh/posix:toolchain_type"]
ctx.actions.run_shell(
mnemonic = "ScaladocFindOutputFiles",
outputs = [zipper_args_file],
inputs = [outdir],
- command = "find -L {src_path} -type f | sed -E 's#^{src_path}/(.*)$#\\1={src_path}/\\1#' | sort > {args_file}".format(
+ command = "{find} -L {src_path} -type f | {sed} -E 's#^{src_path}/(.*)$#\\1={src_path}/\\1#' | {sort} > {args_file}".format(
+ find = posix.commands["find"],
+ sed = posix.commands["sed"],
+ sort = posix.commands["sort"],
src_path = outdir.path,
args_file = zipper_args_file.path,
),
progress_message = "find_scaladoc_output_files %s" % zipper_args_file.path,
- use_default_shell_env = True,
)
ctx.actions.run(
@@ -368,6 +378,7 @@ scaladoc_jar = rule(
outputs = {
"out": "%{name}.jar",
},
+ toolchains = ["@rules_sh//sh/posix:toolchain_type"],
)
"""
Generates a Scaladoc jar path/to/target/<name>.jar.

View File

@@ -167,7 +167,7 @@ def daml_deps():
name = "build_bazel_rules_nodejs",
urls = ["https://github.com/bazelbuild/rules_nodejs/releases/download/0.32.2/rules_nodejs-0.32.2.tar.gz"],
sha256 = "6d4edbf28ff6720aedf5f97f9b9a7679401bf7fca9d14a0fff80f644a99992b4",
- patches = ["@com_github_digital_asset_daml//bazel_tools:rules_nodejs_default_shell_env.patch"],
+ patches = ["@com_github_digital_asset_daml//bazel_tools:rules_nodejs_posix_path.patch"],
patch_args = ["-p1"],
)

View File

@@ -4,6 +4,7 @@
def _dar_to_scala_impl(ctx):
codegen_out_dir = ctx.actions.declare_directory(ctx.label.name + "_codegen_out")
srcjar_out_file = ctx.outputs.srcjar_out
+ posix = ctx.toolchains["@rules_sh//sh/posix:toolchain_type"]
# Call Scala codegen
gen_args = ctx.actions.args()
@@ -18,7 +19,6 @@ def _dar_to_scala_impl(ctx):
arguments = [gen_args],
progress_message = "scala codegen files: %s" % ctx.attr.name,
executable = ctx.executable._codegen,
- use_default_shell_env = True,
)
# Create zipper_args file
@@ -27,12 +27,14 @@ def _dar_to_scala_impl(ctx):
mnemonic = "CreateZipperArgsFile",
outputs = [zipper_args_file],
inputs = [codegen_out_dir],
- command = "find -L {src_path} -type f | sed -E 's#^{src_path}/(.*)$#\\1={src_path}/\\1#' | sort > {args_file}".format(
+ command = "{find} -L {src_path} -type f | {sed} -E 's#^{src_path}/(.*)$#\\1={src_path}/\\1#' | {sort} > {args_file}".format(
+ find = posix.commands["find"],
+ sed = posix.commands["sed"],
+ sort = posix.commands["sort"],
src_path = codegen_out_dir.path,
args_file = zipper_args_file.path,
),
progress_message = "zipper_args_file: %s" % zipper_args_file.path,
- use_default_shell_env = True,
)
# Call zipper to create srcjar
@@ -85,4 +87,5 @@ dar_to_scala = rule(
"srcjar_out": "%{srcjar_out}",
},
output_to_genfiles = True,
+ toolchains = ["@rules_sh//sh/posix:toolchain_type"],
)