Merge commit '808530c1c137c576db584c4c849899b172815cb3' as 'prelude'

This commit is contained in:
Brian Hicks 2023-04-26 16:10:46 -05:00
commit 1e9cc34a34
482 changed files with 75129 additions and 0 deletions

25
prelude/.buckconfig Normal file
View File

@ -0,0 +1,25 @@
[repositories]
prelude = .
# We want to disable the following values when we don't have open source,
# but our custom config format (yuk) doesn't accept inline comments.
# Therefore, we hide the name of the group when not open source.
[not_repositories] # @oss-enable
fbcode = ../..
fbsource = ../../..
ovr_config = ../../../arvr/tools/build_defs/config
bazel_skylib = ../../../third-party/bazel-skylib
fbcode_macros = ../../../tools/build_defs/fbcode_macros
fbobjc_dylibs = ../../../xplat/configurations/buck/apple/dylibs
buck = ../../../xplat/build_infra/buck_client
buck_bazel_skylib = ../../../xplat/build_infra/buck_client/third-party/skylark/bazel-skylib
toolchains = ../toolchains
[repository_aliases]
[not_repository_aliases] # @oss-enable
config = ovr_config
[buildfile]
[not_buildfile] # @oss-enable
name = TARGETS

0
prelude/.gitignore vendored Normal file
View File

12
prelude/BUCK Normal file
View File

@ -0,0 +1,12 @@
load(":defs.bzl", "export_prelude")

# Alias `glob` under another name so a lint rule that rewrites direct `glob`
# calls into an fbcode macro is not triggered for this file.
globby = glob

export_prelude(
    srcs = globby(
        ["**"],
        # Context: https://fb.workplace.com/groups/buck2users/posts/3121903854732641/
        exclude = ["**/.pyre_configuration.local"],
    ),
)

3
prelude/CHANGELOG.md Normal file
View File

@ -0,0 +1,3 @@
# Buck2 Prelude
* Initial version.

View File

@ -0,0 +1,45 @@
# Open Source Code of Conduct
## Our Pledge
In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to make participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socio-economic status, nationality, personal appearance, race, religion, or sexual identity and orientation.
## Our Standards
Examples of behavior that contributes to creating a positive environment include:
Using welcoming and inclusive language
Being respectful of differing viewpoints and experiences
Gracefully accepting constructive criticism
Focusing on what is best for the community
Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
The use of sexualized language or imagery and unwelcome sexual attention or advances
Trolling, insulting/derogatory comments, and personal or political attacks
Public or private harassment
Publishing others' private information, such as a physical or electronic address, without explicit permission
Other conduct which could reasonably be considered inappropriate in a professional setting
## Our Responsibilities
Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.
Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
## Scope
This Code of Conduct applies within all project spaces, and it also applies when an individual is representing the project or its community in public spaces. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at opensource-conduct@fb.com. All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.
Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
[homepage]: https://www.contributor-covenant.org

49
prelude/CONTRIBUTING.md Normal file
View File

@ -0,0 +1,49 @@
# Contributing to Buck2 Prelude
This repository is a subset of <https://github.com/facebook/buck2>.
You can contribute to either that repo, or this repo - changes will be mirrored to both.
We want to make contributing to this project as easy and transparent as possible.
## Our Development Process
Buck2 Prelude is currently developed in Facebook's internal repositories and then exported
out to GitHub by a Facebook team member; however, we invite you to submit pull
requests as described below.
## Pull Requests
We actively welcome your pull requests.
1. Fork the repo and create your branch from `main`.
2. If you've added code that should be tested, add tests.
3. If you've changed APIs, update the documentation.
4. Ensure the test suite passes.
5. Make sure your code lints.
6. If you haven't already, complete the Contributor License Agreement ("CLA").
## Contributor License Agreement ("CLA")
In order to accept your pull request, we need you to submit a CLA. You only need
to do this once to work on any of Facebook's open source projects.
Complete your CLA here: <https://code.facebook.com/cla>
## Issues
We use GitHub issues to track public bugs. Please ensure your description is
clear and has sufficient instructions to be able to reproduce the issue.
Facebook has a [bounty program](https://www.facebook.com/whitehat/) for the safe
disclosure of security bugs. In those cases, please go through the process
outlined on that page and do not file a public issue.
## Coding Style
We use several Python formatters.
## License
By contributing to Buck2 Prelude, you agree that your contributions will be
licensed under both the [LICENSE-MIT](LICENSE-MIT) and [LICENSE-APACHE](LICENSE-APACHE)
files in the root directory of this source tree.

201
prelude/LICENSE-APACHE Normal file
View File

@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

21
prelude/LICENSE-MIT Normal file
View File

@ -0,0 +1,21 @@
MIT License
Copyright (c) Meta Platforms, Inc. and affiliates.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

12
prelude/README.md Normal file
View File

@ -0,0 +1,12 @@
# Buck2 Prelude
This repo contains a copy of the Buck2 Prelude, which is often included as a submodule with a Buck2 project.
To obtain a copy of this repo, and set up other details of a Buck2 project, you should usually run `buck2 init --git`.
Most information can be found on the main [Buck2 GitHub project](https://github.com/facebook/buck2).
Pull requests and issues should be raised at [facebook/buck2](https://github.com/facebook/buck2) as that project
is more closely monitored and contains CI checks.
## License
Buck2 Prelude is both MIT and Apache License, Version 2.0 licensed, as found in the [LICENSE-MIT](LICENSE-MIT) and [LICENSE-APACHE](LICENSE-APACHE) files.

22
prelude/alias.bzl Normal file
View File

@ -0,0 +1,22 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
# Implementation of aliases build rules.
def alias_impl(ctx: "context") -> ["provider"]:
    # An alias contributes nothing of its own: it re-exports every provider
    # of the target named by `actual`.
    actual = ctx.attrs.actual
    return actual.providers
def configured_alias_impl(ctx: "context") -> ["provider"]:
    # Forward providers from the first dep that is set: the explicitly
    # configured target wins over the fallback.
    for candidate in (ctx.attrs.configured_actual, ctx.attrs.fallback_actual):
        if candidate != None:
            return candidate.providers
    fail("must set one of `configured_actual` or `fallback_actual`")
def versioned_alias_impl(_ctx: "context") -> ["provider"]:
    # Should be intercepted in macro stub and converted to `alias`.
    # Reaching this implementation means the macro layer did not rewrite the
    # rule, which is an error by construction.
    fail("unsupported")

View File

@ -0,0 +1,117 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//android:android_providers.bzl", "Aapt2LinkInfo")
# Base value that per-module package-ID offsets are added to when passing
# `--package-id` to aapt2 (see `package_id_offset` below).
BASE_PACKAGE_ID = 0x7f

def get_aapt2_link(
        ctx: "context",
        android_toolchain: "AndroidToolchainInfo",
        aapt2_compile_rules: ["artifact"],
        android_manifest: "artifact",
        includes_vector_drawables: bool.type,
        no_auto_version: bool.type,
        no_version_transitions: bool.type,
        no_auto_add_overlay: bool.type,
        use_proto_format: bool.type,
        no_resource_removal: bool.type,
        should_keep_raw_values: bool.type,
        package_id_offset: int.type,
        resource_stable_ids: ["artifact", None],
        preferred_density: [str.type, None],
        min_sdk: [str.type, None],
        filter_locales: bool.type,
        locales: [str.type],
        compiled_resource_apks: ["artifact"],
        additional_aapt2_params: [str.type],
        extra_filtered_resources: [str.type]) -> Aapt2LinkInfo.type:
    """Declares an `aapt2 link` action producing the resource apk.

    Builds the aapt2 command line from the given flags, registers the action,
    optionally strips `extra_filtered_resources` entries out of the resulting
    apk with `zip -d`, and returns an Aapt2LinkInfo carrying the (possibly
    filtered) resource apk, the proguard config, and the R.txt symbol file.
    """
    aapt2_command = cmd_args(android_toolchain.aapt2)
    aapt2_command.add("link")

    # aapt2 only supports @ for -R or input files, not for all args, so we pass in all "normal"
    # args here.
    resources_apk = ctx.actions.declare_output("resource-apk.ap_")
    aapt2_command.add(["-o", resources_apk.as_output()])
    proguard_config = ctx.actions.declare_output("proguard_config.pro")
    aapt2_command.add(["--proguard", proguard_config.as_output()])

    # We don't need the R.java output, but aapt2 won't output R.txt unless we also request R.java.
    r_dot_java = ctx.actions.declare_output("initial-rdotjava", dir = True)
    aapt2_command.add(["--java", r_dot_java.as_output()])
    r_dot_txt = ctx.actions.declare_output("R.txt")
    aapt2_command.add(["--output-text-symbols", r_dot_txt.as_output()])

    aapt2_command.add(["--manifest", android_manifest])
    aapt2_command.add(["-I", android_toolchain.android_jar])

    # Optional boolean flags, each mapped 1:1 onto an aapt2 switch.
    if includes_vector_drawables:
        aapt2_command.add("--no-version-vectors")
    if no_auto_version:
        aapt2_command.add("--no-auto-version")
    if no_version_transitions:
        aapt2_command.add("--no-version-transitions")
    if not no_auto_add_overlay:
        aapt2_command.add("--auto-add-overlay")
    if use_proto_format:
        aapt2_command.add("--proto-format")
    if no_resource_removal:
        aapt2_command.add("--no-resource-removal")
    if should_keep_raw_values:
        aapt2_command.add("--keep-raw-values")
    if package_id_offset != 0:
        aapt2_command.add(["--package-id", "0x{}".format(BASE_PACKAGE_ID + package_id_offset)])
    if resource_stable_ids != None:
        aapt2_command.add(["--stable-ids", resource_stable_ids])
    if preferred_density != None:
        aapt2_command.add(["--preferred-density", preferred_density])
    if min_sdk != None:
        aapt2_command.add(["--min-sdk-version", min_sdk])
    if filter_locales and len(locales) > 0:
        aapt2_command.add("-c")

        # "NONE" means "en", update the list of locales
        aapt2_command.add(cmd_args([locale if locale != "NONE" else "en" for locale in locales], delimiter = ","))

    for compiled_resource_apk in compiled_resource_apks:
        aapt2_command.add(["-I", compiled_resource_apk])

    # The compiled .flata inputs are passed via an @-argfile (-R is one of the
    # few args aapt2 accepts @ syntax for); `hidden` keeps them as action deps.
    aapt2_compile_rules_args_file = ctx.actions.write("aapt2_compile_rules_args_file", cmd_args(aapt2_compile_rules, delimiter = " "))
    aapt2_command.add("-R")
    aapt2_command.add(cmd_args(aapt2_compile_rules_args_file, format = "@{}"))
    aapt2_command.hidden(aapt2_compile_rules)

    aapt2_command.add(additional_aapt2_params)

    ctx.actions.run(aapt2_command, category = "aapt2_link")

    # The normal resource filtering apparatus is super slow, because it extracts the whole apk,
    # strips files out of it, then repackages it.
    #
    # This is a faster filtering step that just uses zip -d to remove entries from the archive.
    # It's also superbly dangerous.
    if len(extra_filtered_resources) > 0:
        filtered_resources_apk = ctx.actions.declare_output("filtered-resource-apk.ap_")

        # Copy the apk, make it writable, then delete the named zip entries in place.
        filter_resources_sh_cmd = cmd_args([
            "sh",
            "-c",
            'cp "$1" "$2" && chmod 644 "$2" && zip -d "$2" "$3"',
            "--",
            resources_apk,
            filtered_resources_apk.as_output(),
            extra_filtered_resources,
        ])
        ctx.actions.run(filter_resources_sh_cmd, category = "aapt2_filter_resources")
        primary_resources_apk = filtered_resources_apk
    else:
        primary_resources_apk = resources_apk

    return Aapt2LinkInfo(
        primary_resources_apk = primary_resources_apk,
        proguard_config_file = proguard_config,
        r_dot_txt = r_dot_txt,
    )

222
prelude/android/android.bzl Normal file
View File

@ -0,0 +1,222 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//java:dex_toolchain.bzl", "DexToolchainInfo")
load("@prelude//java:java.bzl", "AbiGenerationMode", "dex_min_sdk_version", "select_java_test_toolchain")
load("@prelude//java:java_toolchain.bzl", "JavaPlatformInfo", "JavaTestToolchainInfo", "JavaToolchainInfo")
load("@prelude//kotlin:kotlin_toolchain.bzl", "KotlinToolchainInfo")
load("@prelude//decls/android_rules.bzl", "AaptMode", "DuplicateResourceBehaviour", "TargetCpuType")
load("@prelude//genrule.bzl", "genrule_attributes")
load(":android_aar.bzl", "android_aar_impl")
load(":android_apk.bzl", "android_apk_impl")
load(":android_build_config.bzl", "android_build_config_impl")
load(":android_instrumentation_apk.bzl", "android_instrumentation_apk_impl")
load(":android_instrumentation_test.bzl", "android_instrumentation_test_impl")
load(":android_library.bzl", "android_library_impl")
load(":android_manifest.bzl", "android_manifest_impl")
load(":android_prebuilt_aar.bzl", "android_prebuilt_aar_impl")
load(":android_resource.bzl", "android_resource_impl")
load(":android_toolchain.bzl", "AndroidPlatformInfo", "AndroidToolchainInfo")
load(":apk_genrule.bzl", "apk_genrule_impl")
load(":configuration.bzl", "cpu_split_transition", "cpu_split_transition_instrumentation_test_apk", "cpu_transition", "do_not_build_only_native_code_transition", "is_building_android_binary_attr")
load(":gen_aidl.bzl", "gen_aidl_impl")
load(":prebuilt_native_library.bzl", "prebuilt_native_library_impl")
load(":robolectric_test.bzl", "robolectric_test_impl")
load(":voltron.bzl", "android_app_modularity_impl")
def android_toolchain():
    # Implicit toolchain dep used by every Android rule.
    # FIXME: prelude// should be standalone (not refer to fbcode//)
    return attrs.toolchain_dep(
        default = "fbcode//buck2/platform/toolchain:android",
        providers = [AndroidPlatformInfo, AndroidToolchainInfo],
    )
def _dex_toolchain():
    # Toolchain dep providing the dexer used for Android builds.
    # FIXME: prelude// should be standalone (not refer to fbcode//)
    return attrs.toolchain_dep(
        default = "fbcode//buck2/platform/toolchain:dex_for_android",
        providers = [DexToolchainInfo],
    )
def java_toolchain_for_android():
    # Java toolchain variant used when compiling for Android targets.
    # FIXME: prelude// should be standalone (not refer to fbcode//)
    return attrs.toolchain_dep(
        default = "fbcode//buck2/platform/toolchain:java_for_android",
        providers = [JavaPlatformInfo, JavaToolchainInfo],
    )
def java_toolchain_for_android_test():
    # Java toolchain variant used for host-side tests (e.g. robolectric_test).
    # FIXME: prelude// should be standalone (not refer to fbcode//)
    return attrs.toolchain_dep(
        default = "fbcode//buck2/platform/toolchain:java_for_host_test",
        providers = [JavaPlatformInfo, JavaToolchainInfo],
    )
def _kotlin_toolchain():
    # Kotlin toolchain dep for rules that may contain Kotlin sources.
    # FIXME: prelude// should be standalone (not refer to fbcode//)
    return attrs.toolchain_dep(
        default = "fbcode//buck2/platform/toolchain:kotlin",
        providers = [KotlinToolchainInfo],
    )
def is_build_only_native_code():
    # Resolves to True only on the dedicated build-only-native-code platform
    # constraint; False for every other configuration.
    return select({
        "DEFAULT": False,
        "fbsource//xplat/buck2/platform/android:build_only_native_code": True,
    })
# Maps each Android rule name to the Starlark function implementing it.
# Note that both `android_binary` and (via its own impl) instrumentation APKs
# are APK-producing rules; `android_binary` maps onto `android_apk_impl`.
implemented_rules = {
    "android_aar": android_aar_impl,
    "android_app_modularity": android_app_modularity_impl,
    "android_binary": android_apk_impl,
    "android_build_config": android_build_config_impl,
    "android_instrumentation_apk": android_instrumentation_apk_impl,
    "android_instrumentation_test": android_instrumentation_test_impl,
    "android_library": android_library_impl,
    "android_manifest": android_manifest_impl,
    "android_prebuilt_aar": android_prebuilt_aar_impl,
    "android_resource": android_resource_impl,
    "apk_genrule": apk_genrule_impl,
    "gen_aidl": gen_aidl_impl,
    "prebuilt_native_library": prebuilt_native_library_impl,
    "robolectric_test": robolectric_test_impl,
}
# Can't load `read_bool` here because it will cause circular load.
# Only the exact buckconfig strings "True"/"true" enable these flags; any
# other spelling (e.g. "TRUE", "1") is treated as disabled.
FORCE_SINGLE_CPU = read_config("buck2", "android_force_single_cpu") in ("True", "true")
FORCE_SINGLE_DEFAULT_CPU = read_config("buck2", "android_force_single_default_cpu") in ("True", "true")
# Per-rule attribute additions and overrides, keyed by rule name (same key set
# as `implemented_rules` above). Attributes whose names start with "_" are
# implicit: they carry toolchain deps and config-derived defaults injected by
# the prelude rather than values set by users in BUCK files.
extra_attributes = {
    "android_aar": {
        "abi_generation_mode": attrs.option(attrs.enum(AbiGenerationMode), default = None),
        "resources_root": attrs.option(attrs.string(), default = None),
    },
    "android_app_modularity": {
        "_android_toolchain": android_toolchain(),
        "_build_only_native_code": attrs.default_only(attrs.bool(default = is_build_only_native_code())),
    },
    "android_binary": {
        "aapt_mode": attrs.enum(AaptMode, default = "aapt1"),  # Match default in V1
        "application_module_configs": attrs.dict(key = attrs.string(), value = attrs.list(attrs.transition_dep(cfg = cpu_transition)), sorted = False, default = {}),
        "build_config_values_file": attrs.option(attrs.one_of(attrs.transition_dep(cfg = cpu_transition), attrs.source()), default = None),
        # Deps are split-transitioned so each requested CPU gets its own configuration.
        "deps": attrs.list(attrs.split_transition_dep(cfg = cpu_split_transition), default = []),
        "dex_tool": attrs.string(default = "d8"),  # Match default in V1
        "duplicate_resource_behavior": attrs.enum(DuplicateResourceBehaviour, default = "allow_by_default"),  # Match default in V1
        "manifest": attrs.option(attrs.one_of(attrs.transition_dep(cfg = cpu_transition), attrs.source()), default = None),
        "manifest_skeleton": attrs.option(attrs.one_of(attrs.transition_dep(cfg = cpu_transition), attrs.source()), default = None),
        "min_sdk_version": attrs.option(attrs.int(), default = None),
        "module_manifest_skeleton": attrs.option(attrs.one_of(attrs.transition_dep(cfg = cpu_transition), attrs.source()), default = None),
        "_android_toolchain": android_toolchain(),
        "_dex_toolchain": _dex_toolchain(),
        "_is_building_android_binary": attrs.default_only(attrs.bool(default = True)),
        "_is_force_single_cpu": attrs.default_only(attrs.bool(default = FORCE_SINGLE_CPU)),
        "_is_force_single_default_cpu": attrs.default_only(attrs.bool(default = FORCE_SINGLE_DEFAULT_CPU)),
        "_java_toolchain": java_toolchain_for_android(),
    },
    "android_build_config": {
        "_android_toolchain": android_toolchain(),
        "_build_only_native_code": attrs.default_only(attrs.bool(default = is_build_only_native_code())),
        "_is_building_android_binary": is_building_android_binary_attr(),
        "_java_toolchain": java_toolchain_for_android(),
    },
    "android_instrumentation_apk": {
        "aapt_mode": attrs.enum(AaptMode, default = "aapt1"),  # Match default in V1
        "apk": attrs.transition_dep(cfg = do_not_build_only_native_code_transition),
        "cpu_filters": attrs.list(attrs.enum(TargetCpuType), default = []),
        "deps": attrs.list(attrs.split_transition_dep(cfg = cpu_split_transition_instrumentation_test_apk), default = []),
        "dex_tool": attrs.string(default = "d8"),  # Match default in V1
        "manifest": attrs.option(attrs.one_of(attrs.transition_dep(cfg = cpu_transition), attrs.source()), default = None),
        "manifest_skeleton": attrs.option(attrs.one_of(attrs.transition_dep(cfg = cpu_transition), attrs.source()), default = None),
        "min_sdk_version": attrs.option(attrs.int(), default = None),
        "_android_toolchain": android_toolchain(),
        "_dex_toolchain": _dex_toolchain(),
        "_is_building_android_binary": attrs.default_only(attrs.bool(default = True)),
        "_is_force_single_cpu": attrs.default_only(attrs.bool(default = FORCE_SINGLE_CPU)),
        "_is_force_single_default_cpu": attrs.default_only(attrs.bool(default = FORCE_SINGLE_DEFAULT_CPU)),
        "_java_toolchain": java_toolchain_for_android(),
    },
    "android_instrumentation_test": {
        "_android_toolchain": android_toolchain(),
        "_java_toolchain": java_toolchain_for_android(),
    },
    "android_library": {
        "abi_generation_mode": attrs.option(attrs.enum(AbiGenerationMode), default = None),
        "resources_root": attrs.option(attrs.string(), default = None),
        "_android_toolchain": android_toolchain(),
        "_build_only_native_code": attrs.default_only(attrs.bool(default = is_build_only_native_code())),
        "_dex_min_sdk_version": attrs.default_only(attrs.option(attrs.int(), default = dex_min_sdk_version())),
        "_dex_toolchain": _dex_toolchain(),
        "_is_building_android_binary": is_building_android_binary_attr(),
        "_java_toolchain": java_toolchain_for_android(),
        "_kotlin_toolchain": _kotlin_toolchain(),
    },
    "android_manifest": {
        "_android_toolchain": android_toolchain(),
    },
    "android_prebuilt_aar": {
        # Prebuilt jars are quick to build, and often contain third-party code, which in turn is
        # often a source of annotations and constants. To ease migration to ABI generation from
        # source without deps, we have them present during ABI gen by default.
        "required_for_source_only_abi": attrs.bool(default = True),
        "_android_toolchain": android_toolchain(),
        "_build_only_native_code": attrs.default_only(attrs.bool(default = is_build_only_native_code())),
        "_dex_min_sdk_version": attrs.default_only(attrs.option(attrs.int(), default = dex_min_sdk_version())),
        "_dex_toolchain": _dex_toolchain(),
        "_java_toolchain": java_toolchain_for_android(),
    },
    "android_resource": {
        # `assets`/`res` accept either a directory or an explicit path->source map.
        "assets": attrs.option(attrs.one_of(attrs.source(allow_directory = True), attrs.dict(key = attrs.string(), value = attrs.source(), sorted = True)), default = None),
        "project_assets": attrs.option(attrs.source(allow_directory = True), default = None),
        "project_res": attrs.option(attrs.source(allow_directory = True), default = None),
        "res": attrs.option(attrs.one_of(attrs.source(allow_directory = True), attrs.dict(key = attrs.string(), value = attrs.source(), sorted = True)), default = None),
        "_android_toolchain": android_toolchain(),
        "_build_only_native_code": attrs.default_only(attrs.bool(default = is_build_only_native_code())),
    },
    "apk_genrule": genrule_attributes() | {
        "type": attrs.string(default = "apk"),
        "_android_toolchain": android_toolchain(),
    },
    "gen_aidl": {
        "import_paths": attrs.list(attrs.arg(), default = []),
        "_android_toolchain": android_toolchain(),
        "_java_toolchain": java_toolchain_for_android(),
    },
    "prebuilt_native_library": {
        "native_libs": attrs.source(allow_directory = True),
    },
    "robolectric_test": {
        "abi_generation_mode": attrs.option(attrs.enum(AbiGenerationMode), default = None),
        "resources_root": attrs.option(attrs.string(), default = None),
        "robolectric_runtime_dependencies": attrs.list(attrs.source(), default = []),
        "_android_toolchain": android_toolchain(),
        "_build_only_native_code": attrs.default_only(attrs.bool(default = is_build_only_native_code())),
        "_is_building_android_binary": attrs.default_only(attrs.bool(default = False)),
        "_java_test_toolchain": attrs.default_only(attrs.exec_dep(
            default = select_java_test_toolchain(),
            providers = [
                JavaTestToolchainInfo,
            ],
        )),
        "_java_toolchain": java_toolchain_for_android_test(),
        "_kotlin_toolchain": _kotlin_toolchain(),
    },
}

View File

@ -0,0 +1,10 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
def android_aar_impl(_ctx: "context") -> ["provider"]:
    # Stub implementation: AAR packaging is not implemented yet (T145318686),
    # so this rule currently only advertises an empty DefaultInfo.
    providers = [DefaultInfo()]
    return providers

View File

@ -0,0 +1,363 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//android:android_binary_native_library_rules.bzl", "get_android_binary_native_library_info")
load("@prelude//android:android_binary_resources_rules.bzl", "get_android_binary_resources_info")
load("@prelude//android:android_build_config.bzl", "generate_android_build_config", "get_build_config_fields")
load("@prelude//android:android_providers.bzl", "AndroidApkInfo", "AndroidApkUnderTestInfo", "BuildConfigField", "ExopackageInfo", "merge_android_packageable_info")
load("@prelude//android:android_toolchain.bzl", "AndroidToolchainInfo")
load("@prelude//android:configuration.bzl", "get_deps_by_platform")
load("@prelude//android:cpu_filters.bzl", "ALL_CPU_FILTERS")
load("@prelude//android:dex_rules.bzl", "get_multi_dex", "get_single_primary_dex", "get_split_dex_merge_config", "merge_to_single_dex", "merge_to_split_dex")
load("@prelude//android:exopackage.bzl", "get_exopackage_flags")
load("@prelude//android:preprocess_java_classes.bzl", "get_preprocessed_java_classes")
load("@prelude//android:proguard.bzl", "get_proguard_output")
load("@prelude//android:voltron.bzl", "get_target_to_module_mapping")
load("@prelude//java:java_providers.bzl", "KeystoreInfo", "create_java_packaging_dep", "get_all_java_packaging_deps", "get_all_java_packaging_deps_from_packaging_infos")
load("@prelude//java/utils:java_utils.bzl", "get_path_separator")
load("@prelude//utils:set.bzl", "set")
load("@prelude//utils:utils.bzl", "expect")
def android_apk_impl(ctx: "context") -> ["provider"]:
    # Implementation of the `android_apk` rule: dexes the Java/Kotlin packaging
    # deps, packages native libraries and resources, and assembles a signed APK
    # along with exopackage/install metadata and several debugging sub-targets.
    sub_targets = {}

    _verify_params(ctx)

    # The first cpu filter is the "primary" platform; its deps drive dexing,
    # build-config generation, and resource processing.
    cpu_filters = ctx.attrs.cpu_filters or ALL_CPU_FILTERS
    deps_by_platform = get_deps_by_platform(ctx)
    primary_platform = cpu_filters[0]
    deps = deps_by_platform[primary_platform]

    # Voltron module mapping (None when the binary has no modules).
    target_to_module_mapping_file = get_target_to_module_mapping(ctx, deps)

    # Jars owned by targets listed in `no_dx` are kept out of dexing.
    no_dx_target_labels = [no_dx_target.label.raw_target() for no_dx_target in ctx.attrs.no_dx]

    java_packaging_deps = [packaging_dep for packaging_dep in get_all_java_packaging_deps(ctx, deps)]

    # Generated BuildConfig libraries are appended to the packaging deps.
    android_packageable_info = merge_android_packageable_info(ctx.label, ctx.actions, deps)
    build_config_infos = list(android_packageable_info.build_config_infos.traverse()) if android_packageable_info.build_config_infos else []
    build_config_libs = _get_build_config_java_libraries(ctx, build_config_infos)
    java_packaging_deps += get_all_java_packaging_deps_from_packaging_infos(ctx, build_config_libs)

    # Pre-dexing (one dex per library, merged later) is only possible when no
    # whole-program transformation (proguard / preprocess_java_classes) runs.
    has_proguard_config = ctx.attrs.proguard_config != None or ctx.attrs.android_sdk_proguard_config == "default" or ctx.attrs.android_sdk_proguard_config == "optimized"
    should_pre_dex = not ctx.attrs.disable_pre_dex and not has_proguard_config and not ctx.attrs.preprocess_java_classes_bash

    # With trim_resource_ids, only the resources referenced by the dexes are kept.
    referenced_resources_lists = [java_packaging_dep.dex.referenced_resources for java_packaging_dep in java_packaging_deps if java_packaging_dep.dex] if ctx.attrs.trim_resource_ids and should_pre_dex else []
    resources_info = get_android_binary_resources_info(
        ctx,
        deps,
        android_packageable_info,
        java_packaging_deps,
        apk_module_graph_file = target_to_module_mapping_file,
        use_proto_format = False,
        referenced_resources_lists = referenced_resources_lists,
        manifest_entries = ctx.attrs.manifest_entries,
    )

    # The generated R.java libraries also need to be dexed and packaged.
    android_toolchain = ctx.attrs._android_toolchain[AndroidToolchainInfo]
    java_packaging_deps += [
        create_java_packaging_dep(
            ctx,
            r_dot_java.library_output.full_library,
            dex_weight_factor = android_toolchain.r_dot_java_weight_factor,
        )
        for r_dot_java in resources_info.r_dot_javas
    ]

    dex_java_packaging_deps = [packaging_dep for packaging_dep in java_packaging_deps if packaging_dep.dex and packaging_dep.dex.dex.owner.raw_target() not in no_dx_target_labels]
    if should_pre_dex:
        # Fast path: merge the per-library dexes into one dex or a split-dex set.
        pre_dexed_libs = [packaging_dep.dex for packaging_dep in dex_java_packaging_deps]
        if ctx.attrs.use_split_dex:
            dex_files_info = merge_to_split_dex(
                ctx,
                android_toolchain,
                pre_dexed_libs,
                get_split_dex_merge_config(ctx, android_toolchain),
                target_to_module_mapping_file,
            )
        else:
            dex_files_info = merge_to_single_dex(ctx, android_toolchain, pre_dexed_libs)
    else:
        # Slow path: optionally preprocess and/or proguard the jars, then dex
        # everything together.
        jars_to_owners = {packaging_dep.jar: packaging_dep.jar.owner.raw_target() for packaging_dep in dex_java_packaging_deps}
        if ctx.attrs.preprocess_java_classes_bash:
            jars_to_owners = get_preprocessed_java_classes(ctx, jars_to_owners)
        if has_proguard_config:
            proguard_output = get_proguard_output(ctx, jars_to_owners, java_packaging_deps, resources_info.proguard_config_file)
            jars_to_owners = proguard_output.jars_to_owners
            # Expose proguard's text outputs as a sub-target for tooling; hidden
            # artifacts are disambiguated with an index prefix.
            dir_srcs = {artifact.basename: artifact for artifact in proguard_output.proguard_artifacts}
            for i, hidden_artifact in enumerate(proguard_output.proguard_hidden_artifacts):
                dir_srcs["hidden/{}_{}".format(i, hidden_artifact.basename)] = hidden_artifact
            sub_targets["proguard_text_output"] = [
                DefaultInfo(
                    default_output = ctx.actions.symlinked_dir(
                        "proguard_text_output",
                        dir_srcs,
                    ),
                ),
            ]
        else:
            proguard_output = None

        if ctx.attrs.use_split_dex:
            dex_files_info = get_multi_dex(
                ctx,
                ctx.attrs._android_toolchain[AndroidToolchainInfo],
                jars_to_owners,
                ctx.attrs.primary_dex_patterns,
                proguard_output.proguard_configuration_output_file if proguard_output else None,
                proguard_output.proguard_mapping_output_file if proguard_output else None,
                is_optimized = has_proguard_config,
                apk_module_graph_file = target_to_module_mapping_file,
            )
        else:
            dex_files_info = get_single_primary_dex(
                ctx,
                ctx.attrs._android_toolchain[AndroidToolchainInfo],
                jars_to_owners.keys(),
                is_optimized = has_proguard_config,
            )

    native_library_info = get_android_binary_native_library_info(ctx, android_packageable_info, deps_by_platform, apk_module_graph_file = target_to_module_mapping_file)
    unstripped_native_libs = native_library_info.unstripped_libs
    # Sub-target listing (and materializing) the unstripped native libraries.
    sub_targets["unstripped_native_libraries"] = [
        DefaultInfo(
            default_output = ctx.actions.write("unstripped_native_libraries", unstripped_native_libs),
            other_outputs = unstripped_native_libs,
        ),
    ]
    if resources_info.string_source_map:
        sub_targets["generate_string_resources"] = [DefaultInfo(default_output = resources_info.string_source_map)]
    if resources_info.voltron_string_source_map:
        sub_targets["generate_voltron_string_resources"] = [DefaultInfo(default_output = resources_info.voltron_string_source_map)]
    if dex_files_info.primary_dex_class_names:
        sub_targets["primary_dex_class_names"] = [DefaultInfo(default_output = dex_files_info.primary_dex_class_names)]

    keystore = ctx.attrs.keystore[KeystoreInfo]
    output_apk = build_apk(
        label = ctx.label,
        actions = ctx.actions,
        android_toolchain = ctx.attrs._android_toolchain[AndroidToolchainInfo],
        keystore = keystore,
        dex_files_info = dex_files_info,
        native_library_info = native_library_info,
        resources_info = resources_info,
        compress_resources_dot_arsc = ctx.attrs.resource_compression == "enabled" or ctx.attrs.resource_compression == "enabled_with_strings_as_assets",
    )

    exopackage_info = ExopackageInfo(
        secondary_dex_info = dex_files_info.secondary_dex_exopackage_info,
        native_library_info = native_library_info.exopackage_info,
        resources_info = resources_info.exopackage_info,
    )

    return [
        AndroidApkInfo(apk = output_apk, manifest = resources_info.manifest),
        # AndroidApkUnderTestInfo describes what is already packaged in this APK,
        # so that an instrumentation APK built against it can avoid duplication.
        AndroidApkUnderTestInfo(
            java_packaging_deps = set([dep.label.raw_target() for dep in java_packaging_deps]),
            keystore = keystore,
            manifest_entries = ctx.attrs.manifest_entries,
            prebuilt_native_library_dirs = set([native_lib.raw_target for native_lib in native_library_info.apk_under_test_prebuilt_native_library_dirs]),
            platforms = deps_by_platform.keys(),
            primary_platform = primary_platform,
            resource_infos = set([info.raw_target for info in resources_info.unfiltered_resource_infos]),
            shared_libraries = set([shared_lib.label.raw_target() for shared_lib in native_library_info.apk_under_test_shared_libraries]),
        ),
        DefaultInfo(default_output = output_apk, other_outputs = _get_exopackage_outputs(exopackage_info), sub_targets = sub_targets),
        get_install_info(ctx, output_apk = output_apk, manifest = resources_info.manifest, exopackage_info = exopackage_info),
        # Classpath macros (e.g. $(classpath ...)) for genrules depending on this APK.
        TemplatePlaceholderInfo(
            keyed_variables = {
                "classpath": cmd_args([dep.jar for dep in java_packaging_deps if dep.jar], delimiter = get_path_separator()),
                "classpath_including_targets_with_no_output": cmd_args([dep.output_for_classpath_macro for dep in java_packaging_deps], delimiter = get_path_separator()),
            },
        ),
    ]
def build_apk(
        label: "label",
        actions: "actions",
        keystore: KeystoreInfo.type,
        android_toolchain: AndroidToolchainInfo.type,
        dex_files_info: "DexFilesInfo",
        native_library_info: "AndroidBinaryNativeLibsInfo",
        resources_info: "AndroidBinaryResourcesInfo",
        compress_resources_dot_arsc: bool.type = False) -> "artifact":
    # Invokes the toolchain's apk_builder to combine the primary dex, the
    # resource APK, native-library dirs, asset dirs, extra zips, and resource
    # jars into a zipaligned APK signed with `keystore`. Returns the APK artifact.
    output_apk = actions.declare_output("{}.apk".format(label.name))

    apk_builder_args = cmd_args([
        android_toolchain.apk_builder[RunInfo],
        "--output-apk",
        output_apk.as_output(),
        "--resource-apk",
        resources_info.primary_resources_apk,
        "--dex-file",
        dex_files_info.primary_dex,
        "--keystore-path",
        keystore.store,
        "--keystore-properties-path",
        keystore.properties,
        "--zipalign_tool",
        android_toolchain.zipalign[RunInfo],
    ])

    if compress_resources_dot_arsc:
        apk_builder_args.add("--compress-resources-dot-arsc")

    # Multi-valued inputs are passed via list files written below; the actual
    # artifacts must also be attached as hidden inputs so they get materialized.
    asset_directories = native_library_info.native_lib_assets + dex_files_info.secondary_dex_dirs + resources_info.module_manifests
    asset_directories_file = actions.write("asset_directories.txt", asset_directories)
    apk_builder_args.hidden(asset_directories)
    native_library_directories = actions.write("native_library_directories", native_library_info.native_libs_for_primary_apk)
    apk_builder_args.hidden(native_library_info.native_libs_for_primary_apk)
    all_zip_files = [resources_info.packaged_string_assets] if resources_info.packaged_string_assets else []
    zip_files = actions.write("zip_files", all_zip_files)
    apk_builder_args.hidden(all_zip_files)
    jar_files_that_may_contain_resources = actions.write("jar_files_that_may_contain_resources", resources_info.jar_files_that_may_contain_resources)
    apk_builder_args.hidden(resources_info.jar_files_that_may_contain_resources)

    apk_builder_args.add([
        "--asset-directories-list",
        asset_directories_file,
        "--native-libraries-directories-list",
        native_library_directories,
        "--zip-files-list",
        zip_files,
        "--jar-files-that-may-contain-resources-list",
        jar_files_that_may_contain_resources,
    ])

    actions.run(apk_builder_args, category = "apk_build")

    return output_apk
def get_install_info(ctx: "context", output_apk: "artifact", manifest: "artifact", exopackage_info: [ExopackageInfo.type, None]) -> InstallInfo.type:
    # Builds the InstallInfo consumed by the installer: the APK itself, its
    # manifest, the generated adb options file, and - when exopackage is in
    # play - the out-of-band directories/metadata plus the agent APK.
    files = {
        ctx.attrs.name: output_apk,
        "manifest": manifest,
        "options": generate_install_config(ctx),
    }

    # Flatten the optional ExopackageInfo into its three optional components.
    secondary_dex_exopackage_info = exopackage_info.secondary_dex_info if exopackage_info else None
    native_library_exopackage_info = exopackage_info.native_library_info if exopackage_info else None
    resources_info = exopackage_info.resources_info if exopackage_info else None

    if secondary_dex_exopackage_info:
        files["secondary_dex_exopackage_info_directory"] = secondary_dex_exopackage_info.directory
        files["secondary_dex_exopackage_info_metadata"] = secondary_dex_exopackage_info.metadata

    if native_library_exopackage_info:
        files["native_library_exopackage_info_directory"] = native_library_exopackage_info.directory
        files["native_library_exopackage_info_metadata"] = native_library_exopackage_info.metadata

    if resources_info:
        if resources_info.assets:
            files["resources_exopackage_assets"] = resources_info.assets
            files["resources_exopackage_assets_hash"] = resources_info.assets_hash
        files["resources_exopackage_res"] = resources_info.res
        files["resources_exopackage_res_hash"] = resources_info.res_hash
        files["resources_exopackage_third_party_jar_resources"] = resources_info.third_party_jar_resources
        files["resources_exopackage_third_party_jar_resources_hash"] = resources_info.third_party_jar_resources_hash

    # Any exopackage mode at all requires the agent APK on the device.
    if secondary_dex_exopackage_info or native_library_exopackage_info or resources_info:
        files["exopackage_agent_apk"] = ctx.attrs._android_toolchain[AndroidToolchainInfo].exopackage_agent_apk

    return InstallInfo(
        installer = ctx.attrs._android_toolchain[AndroidToolchainInfo].installer,
        files = files,
    )
def _get_build_config_java_libraries(ctx: "context", build_config_infos: ["AndroidBuildConfigInfo"]) -> ["JavaPackagingInfo"]:
    # Generates one BuildConfig java library per java package contributed by
    # the transitive android_build_config deps, with APK-level values merged in.
    # BuildConfig deps should not be added for instrumented APKs because BuildConfig.class has
    # already been added to the APK under test.
    if ctx.attrs.package_type == "instrumented":
        return []

    # Constants injected into every BuildConfig of this APK.
    build_config_constants = [
        BuildConfigField(type = "boolean", name = "DEBUG", value = str(ctx.attrs.package_type != "release").lower()),
        BuildConfigField(type = "boolean", name = "IS_EXOPACKAGE", value = str(len(ctx.attrs.exopackage_modes) > 0).lower()),
        BuildConfigField(type = "int", name = "EXOPACKAGE_FLAGS", value = str(get_exopackage_flags(ctx.attrs.exopackage_modes))),
    ]

    default_build_config_fields = get_build_config_fields(ctx.attrs.build_config_values)

    java_libraries = []
    java_packages_seen = []
    for build_config_info in build_config_infos:
        java_package = build_config_info.package
        # Two BuildConfigs in the same package would generate clashing classes.
        expect(java_package not in java_packages_seen, "Got the same java_package {} for different AndroidBuildConfigs".format(java_package))
        java_packages_seen.append(java_package)

        # Later writers win: per-target fields are overridden by the rule's
        # build_config_values, which are overridden by the APK-wide constants.
        all_build_config_values = {}
        for build_config_field in build_config_info.build_config_fields + default_build_config_fields + build_config_constants:
            all_build_config_values[build_config_field.name] = build_config_field

        # generate_android_build_config returns a pair; index [1] is the
        # JavaPackagingInfo we package.
        java_libraries.append(generate_android_build_config(
            ctx,
            java_package,
            java_package,
            True,  # use_constant_expressions
            all_build_config_values.values(),
            ctx.attrs.build_config_values_file[DefaultInfo].default_outputs[0] if type(ctx.attrs.build_config_values_file) == "dependency" else ctx.attrs.build_config_values_file,
        )[1])

    return java_libraries
def _get_exopackage_outputs(exopackage_info: ExopackageInfo.type) -> ["artifact"]:
    # Gathers every artifact that may be installed out-of-band via exopackage,
    # so they are materialized alongside the APK.
    outputs = []

    dex_info = exopackage_info.secondary_dex_info
    if dex_info:
        outputs.extend([dex_info.metadata, dex_info.directory])

    native_info = exopackage_info.native_library_info
    if native_info:
        outputs.extend([native_info.metadata, native_info.directory])

    resources_info = exopackage_info.resources_info
    if resources_info:
        outputs.extend([
            resources_info.res,
            resources_info.res_hash,
            resources_info.third_party_jar_resources,
            resources_info.third_party_jar_resources_hash,
        ])
        # Assets are optional within the resources exopackage info.
        if resources_info.assets:
            outputs.extend([resources_info.assets, resources_info.assets_hash])

    return outputs
def _verify_params(ctx: "context"):
    # Fail fast on deprecated or unsupported attribute combinations.
    expect(ctx.attrs.aapt_mode == "aapt2", "aapt1 is deprecated!")
    expect(ctx.attrs.dex_tool == "d8", "dx is deprecated!")
    # Split dex requires permission to place R.java classes in secondary dexes.
    expect(not ctx.attrs.use_split_dex or ctx.attrs.allow_r_dot_java_in_secondary_dex)
def generate_install_config(ctx: "context") -> "artifact":
    # Serializes the adb/install options into the JSON file passed to the
    # installer as "options".
    install_options = get_install_config()
    return ctx.actions.write_json("install_android_options.json", install_options)
def get_install_config() -> {str.type: ""}:
    # TODO: read from toolchains
    # Each option comes from the `[adb]` buckconfig section; the second element
    # of each pair is the default used when the key is absent.
    adb_options = [
        ("adb_restart_on_failure", "true"),
        ("agent_port_base", "2828"),
        ("always_use_java_agent", "false"),
        ("is_zstd_compression_enabled", "false"),
        ("multi_install_mode", "false"),
        ("skip_install_metadata", "false"),
    ]
    install_config = {option: read_config("adb", option, default) for option, default in adb_options}

    # The adb binary override lives in the `[android]` section instead.
    adb_executable = read_config("android", "adb", None)
    if adb_executable:
        install_config["adb_executable"] = adb_executable

    return install_config

View File

@ -0,0 +1,414 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//:paths.bzl", "paths")
load("@prelude//android:android_providers.bzl", "AndroidBinaryNativeLibsInfo", "ExopackageNativeInfo")
load("@prelude//android:android_toolchain.bzl", "AndroidToolchainInfo")
load("@prelude//android:cpu_filters.bzl", "CPU_FILTER_TO_ABI_DIRECTORY")
load("@prelude//android:voltron.bzl", "ROOT_MODULE", "all_targets_in_root_module", "get_apk_module_graph_info", "is_root_module")
load("@prelude//linking:shared_libraries.bzl", "SharedLibraryInfo", "merge_shared_libraries", "traverse_shared_library_info")
load("@prelude//utils:set.bzl", "set_type") # @unused Used as a type
load("@prelude//utils:utils.bzl", "expect")
# Native libraries on Android are built for a particular Application Binary Interface (ABI). We
# package native libraries for one (or more, for multi-arch builds) ABIs into an Android APK.
#
# Our native libraries come from two sources:
# 1. "Prebuilt native library dirs", which are directory artifacts whose sub-directories are ABIs,
# and those ABI subdirectories contain native libraries. These come from `android_prebuilt_aar`s
# and `prebuilt_native_library`s, for example.
# 2. "Native linkables". These are each a single shared library - `.so`s for one particular ABI.
#
# Native libraries can be packaged into Android APKs in two ways.
# 1. As native libraries. This means that they are passed to the APK builder as native libraries,
# and the APK builder will package `<ABI>/library.so` into the APK at `libs/<ABI>/library.so`.
# 2. As assets. These are passed to the APK build as assets, and are stored at
# `assets/lib/<ABI>/library.so` In the root module, we only package a native library as an
# asset if it is eligible to be an asset (e.g. `can_be_asset` on a `cxx_library`), and
# `package_asset_libraries` is set to True for the APK. We will additionally compress all the
# assets into a single `assets/lib/libs.xz` (or `assets/libs/libs.zstd` for `zstd` compression)
# if `compress_asset_libraries` is set to True for the APK. Regardless of whether we compress
# the assets or not, we create a metadata file at `assets/libs/metadata.txt` that has a single
# line entry for each packaged asset consisting of '<ABI/library_name> <file_size> <sha256>'.
#
# Any native library that is not part of the root module (i.e. it is part of some other Voltron
# module) is automatically packaged as an asset, and the assets for each module are compressed
# to a single `assets/<module_name>/libs.xz`. Similarly, the metadata for each module is stored
# at `assets/<module_name>/libs.txt`.
def get_android_binary_native_library_info(
        ctx: "context",
        android_packageable_info: "AndroidPackageableInfo",
        deps_by_platform: {str.type: ["dependency"]},
        apk_module_graph_file: ["artifact", None] = None,
        prebuilt_native_library_dirs_to_exclude: [set_type, None] = None,
        shared_libraries_to_exclude: [set_type, None] = None) -> AndroidBinaryNativeLibsInfo.type:
    # Collects all native libraries (prebuilt dirs and linked .so files) for
    # the binary and packages them per the scheme in the file-level comment.
    # When `apk_module_graph_file` is given, per-module packaging decisions
    # depend on that artifact's contents, so the work runs inside a
    # dynamic_output action and the results are exposed via symlink outputs
    # declared up front.
    traversed_prebuilt_native_library_dirs = android_packageable_info.prebuilt_native_library_dirs.traverse() if android_packageable_info.prebuilt_native_library_dirs else []
    all_prebuilt_native_library_dirs = [
        native_lib
        for native_lib in traversed_prebuilt_native_library_dirs
        if not (prebuilt_native_library_dirs_to_exclude and prebuilt_native_library_dirs_to_exclude.contains(native_lib.raw_target))
    ]

    unstripped_libs = []
    all_shared_libraries = []
    platform_to_native_linkables = {}
    for platform, deps in deps_by_platform.items():
        # Merge the transitive SharedLibraryInfo of this platform's deps, then
        # drop explicitly excluded libraries (used by apk-under-test dedup).
        shared_library_info = merge_shared_libraries(
            ctx.actions,
            deps = filter(None, [x.get(SharedLibraryInfo) for x in deps]),
        )
        native_linkables = {
            so_name: shared_lib
            for so_name, shared_lib in traverse_shared_library_info(shared_library_info).items()
            if not (shared_libraries_to_exclude and shared_libraries_to_exclude.contains(shared_lib.label.raw_target()))
        }
        all_shared_libraries.extend(native_linkables.values())
        unstripped_libs += [shared_lib.lib.output for shared_lib in native_linkables.values()]
        platform_to_native_linkables[platform] = native_linkables

    if apk_module_graph_file == None:
        # No Voltron module graph: everything is in the root module, so the
        # packaging can be computed eagerly.
        native_libs_and_assets_info = _get_native_libs_and_assets(
            ctx,
            all_targets_in_root_module,
            all_prebuilt_native_library_dirs,
            platform_to_native_linkables,
        )
        native_libs_for_primary_apk, exopackage_info = _get_exopackage_info(
            ctx,
            native_libs_and_assets_info.native_libs_always_in_primary_apk,
            native_libs_and_assets_info.native_libs,
            native_libs_and_assets_info.native_libs_metadata,
        )
        # filter(None, ...) drops the optional asset outputs that are absent.
        native_lib_assets = filter(None, [
            native_libs_and_assets_info.native_lib_assets_for_primary_apk,
            native_libs_and_assets_info.stripped_native_linkable_assets_for_primary_apk,
            native_libs_and_assets_info.metadata_assets,
            native_libs_and_assets_info.compressed_lib_assets,
        ])
        return AndroidBinaryNativeLibsInfo(
            apk_under_test_prebuilt_native_library_dirs = all_prebuilt_native_library_dirs,
            apk_under_test_shared_libraries = all_shared_libraries,
            native_libs_for_primary_apk = native_libs_for_primary_apk,
            exopackage_info = exopackage_info,
            unstripped_libs = unstripped_libs,
            native_lib_assets = native_lib_assets,
        )
    else:
        # Modular build: declare symlink outputs now, fill them in dynamically
        # once the module graph artifact can be read.
        native_libs = ctx.actions.declare_output("native_libs_symlink")
        native_libs_metadata = ctx.actions.declare_output("native_libs_metadata_symlink")
        native_libs_always_in_primary_apk = ctx.actions.declare_output("native_libs_always_in_primary_apk_symlink")
        native_lib_assets_for_primary_apk = ctx.actions.declare_output("native_lib_assets_for_primary_apk_symlink")
        stripped_native_linkable_assets_for_primary_apk = ctx.actions.declare_output("stripped_native_linkable_assets_for_primary_apk_symlink")
        metadata_assets = ctx.actions.declare_output("metadata_assets_symlink")
        compressed_lib_assets = ctx.actions.declare_output("compressed_lib_assets_symlink")

        outputs = [
            native_libs,
            native_libs_metadata,
            native_libs_always_in_primary_apk,
            native_lib_assets_for_primary_apk,
            stripped_native_linkable_assets_for_primary_apk,
            metadata_assets,
            compressed_lib_assets,
        ]

        def get_native_libs_info_modular(ctx: "context", artifacts, outputs):
            # Runs once apk_module_graph_file is available; `artifacts` lets us
            # read its contents to map targets to modules.
            get_module_from_target = get_apk_module_graph_info(ctx, apk_module_graph_file, artifacts).target_to_module_mapping_function
            dynamic_info = _get_native_libs_and_assets(
                ctx,
                get_module_from_target,
                all_prebuilt_native_library_dirs,
                platform_to_native_linkables,
            )

            # Since we are using a dynamic action, we need to declare the outputs in advance.
            # Rather than passing the created outputs into `_get_native_libs_and_assets`, we
            # just symlink to the outputs that function produces.
            ctx.actions.symlink_file(outputs[native_libs], dynamic_info.native_libs)
            ctx.actions.symlink_file(outputs[native_libs_metadata], dynamic_info.native_libs_metadata)
            ctx.actions.symlink_file(outputs[native_libs_always_in_primary_apk], dynamic_info.native_libs_always_in_primary_apk)
            # Optional outputs are replaced with empty dirs so the declared
            # symlink outputs are always bound.
            ctx.actions.symlink_file(outputs[native_lib_assets_for_primary_apk], dynamic_info.native_lib_assets_for_primary_apk if dynamic_info.native_lib_assets_for_primary_apk else ctx.actions.symlinked_dir("empty_native_lib_assets", {}))
            ctx.actions.symlink_file(outputs[stripped_native_linkable_assets_for_primary_apk], dynamic_info.stripped_native_linkable_assets_for_primary_apk if dynamic_info.stripped_native_linkable_assets_for_primary_apk else ctx.actions.symlinked_dir("empty_stripped_native_linkable_assets", {}))
            ctx.actions.symlink_file(outputs[metadata_assets], dynamic_info.metadata_assets)
            ctx.actions.symlink_file(outputs[compressed_lib_assets], dynamic_info.compressed_lib_assets)

        ctx.actions.dynamic_output(dynamic = [apk_module_graph_file], inputs = [], outputs = outputs, f = get_native_libs_info_modular)

        native_libs_for_primary_apk, exopackage_info = _get_exopackage_info(ctx, native_libs_always_in_primary_apk, native_libs, native_libs_metadata)
        return AndroidBinaryNativeLibsInfo(
            apk_under_test_prebuilt_native_library_dirs = all_prebuilt_native_library_dirs,
            apk_under_test_shared_libraries = all_shared_libraries,
            native_libs_for_primary_apk = native_libs_for_primary_apk,
            exopackage_info = exopackage_info,
            unstripped_libs = unstripped_libs,
            native_lib_assets = [native_lib_assets_for_primary_apk, stripped_native_linkable_assets_for_primary_apk, metadata_assets, compressed_lib_assets],
        )
# We could just return two artifacts of libs (one for the primary APK, one which can go
# either into the primary APK or be exopackaged), and one artifact of assets,
# but we'd need an extra action in order to combine them (we can't use `symlinked_dir` since
# the paths overlap) so it's easier to just be explicit about exactly what we produce.
_NativeLibsAndAssetsInfo = record(
    # Combined dir of stripped native libs that may be exopackaged.
    native_libs = "artifact",
    # Metadata file emitted while combining `native_libs`.
    native_libs_metadata = "artifact",
    # Combined dir of stripped native libs that must ship in the primary APK.
    native_libs_always_in_primary_apk = "artifact",
    # Root-module prebuilt-dir libs packaged as assets; None when there are
    # none or when they were folded into `compressed_lib_assets`.
    native_lib_assets_for_primary_apk = ["artifact", None],
    # Root-module linkable .so assets; None under the same conditions.
    stripped_native_linkable_assets_for_primary_apk = ["artifact", None],
    # Symlink dir of per-module asset metadata files.
    metadata_assets = "artifact",
    # Symlink dir of per-module compressed asset libraries.
    compressed_lib_assets = "artifact",
)
def _get_exopackage_info(
        ctx: "context",
        native_libs_always_in_primary_apk: "artifact",
        native_libs: "artifact",
        native_libs_metadata: "artifact") -> (["artifact"], [ExopackageNativeInfo.type, None]):
    # When native-library exopackage is enabled, only the libs that must live
    # in the primary APK are packaged there; everything else is delivered
    # out-of-band via the returned ExopackageNativeInfo.
    exopackage_modes = getattr(ctx.attrs, "exopackage_modes", [])
    if "native_library" in exopackage_modes:
        exo_info = ExopackageNativeInfo(directory = native_libs, metadata = native_libs_metadata)
        return [native_libs_always_in_primary_apk], exo_info

    # No exopackage: all native libs go into the primary APK.
    return [native_libs, native_libs_always_in_primary_apk], None
def _get_native_libs_and_assets(
        ctx: "context",
        get_module_from_target: "function",
        all_prebuilt_native_library_dirs: ["PrebuiltNativeLibraryDir"],
        platform_to_native_linkables: {str.type: {str.type: "SharedLibrary"}}) -> _NativeLibsAndAssetsInfo.type:
    # Partitions every native lib (prebuilt dirs and native linkables) into the
    # packaging categories from the file-level comment, then produces the
    # combined library dirs, the asset dirs, and the asset metadata /
    # compressed-asset outputs.
    is_packaging_native_libs_as_assets_supported = getattr(ctx.attrs, "package_asset_libraries", False)

    # Bucket the prebuilt native-library dirs by where they must be packaged.
    prebuilt_native_library_dirs = []
    prebuilt_native_library_dirs_always_in_primary_apk = []
    prebuilt_native_library_dir_assets_for_primary_apk = []
    prebuilt_native_library_dir_module_assets_map = {}
    for native_lib in all_prebuilt_native_library_dirs:
        native_lib_target = str(native_lib.raw_target)
        module = get_module_from_target(native_lib_target)
        if not is_root_module(module):
            # In buck1, we always package native libs as assets when they are not in the root module
            expect(not native_lib.for_primary_apk, "{} which is marked as needing to be in the primary APK cannot be included in non-root-module {}".format(native_lib_target, module))
            prebuilt_native_library_dir_module_assets_map.setdefault(module, []).append(native_lib)
        elif native_lib.is_asset and is_packaging_native_libs_as_assets_supported:
            expect(not native_lib.for_primary_apk, "{} which is marked as needing to be in the primary APK cannot be an asset".format(native_lib_target))
            prebuilt_native_library_dir_assets_for_primary_apk.append(native_lib)
        elif native_lib.for_primary_apk:
            prebuilt_native_library_dirs_always_in_primary_apk.append(native_lib)
        else:
            prebuilt_native_library_dirs.append(native_lib)

    # Filter each bucket down to the ABIs selected by cpu_filters.
    native_libs = _filter_prebuilt_native_library_dir(
        ctx,
        prebuilt_native_library_dirs,
        "native_libs",
    )
    native_libs_always_in_primary_apk = _filter_prebuilt_native_library_dir(
        ctx,
        prebuilt_native_library_dirs_always_in_primary_apk,
        "native_libs_always_in_primary_apk",
    )
    native_lib_assets_for_primary_apk = _filter_prebuilt_native_library_dir(
        ctx,
        prebuilt_native_library_dir_assets_for_primary_apk,
        "native_lib_assets_for_primary_apk",
        package_as_assets = True,
        module = ROOT_MODULE,
    ) if prebuilt_native_library_dir_assets_for_primary_apk else None
    native_lib_module_assets_map = {}
    for module, native_lib_dir in prebuilt_native_library_dir_module_assets_map.items():
        native_lib_module_assets_map[module] = [_filter_prebuilt_native_library_dir(
            ctx,
            native_lib_dir,
            "native_lib_assets_for_module_{}".format(module),
            package_as_assets = True,
            module = module,
        )]

    # Strip and bucket the linked .so files the same way, merging their
    # per-module assets into the map built above.
    (
        stripped_native_linkables,
        stripped_native_linkables_always_in_primary_apk,
        stripped_native_linkable_assets_for_primary_apk,
        stripped_native_linkable_module_assets_map,
    ) = _get_native_linkables(ctx, platform_to_native_linkables, get_module_from_target, is_packaging_native_libs_as_assets_supported)
    for module, native_linkable_assets in stripped_native_linkable_module_assets_map.items():
        native_lib_module_assets_map.setdefault(module, []).append(native_linkable_assets)

    # Asset metadata (and optional compression) for the root module...
    metadata_srcs = {}
    compressed_lib_srcs = {}
    assets_for_primary_apk = filter(None, [native_lib_assets_for_primary_apk, stripped_native_linkable_assets_for_primary_apk])
    if assets_for_primary_apk:
        metadata_file, native_library_paths = _get_native_libs_as_assets_metadata(ctx, assets_for_primary_apk, ROOT_MODULE)
        metadata_srcs[paths.join(_get_native_libs_as_assets_dir(ROOT_MODULE), "metadata.txt")] = metadata_file
        if ctx.attrs.compress_asset_libraries:
            compressed_lib_dir = _get_compressed_native_libs_as_assets(ctx, assets_for_primary_apk, native_library_paths, ROOT_MODULE)
            compressed_lib_srcs[_get_native_libs_as_assets_dir(ROOT_MODULE)] = compressed_lib_dir

            # Since we're storing these as compressed assets, we need to ignore the uncompressed libs.
            native_lib_assets_for_primary_apk = None
            stripped_native_linkable_assets_for_primary_apk = None

    # ...and for every non-root module, whose assets are always compressed.
    for module, native_lib_assets in native_lib_module_assets_map.items():
        metadata_file, native_library_paths = _get_native_libs_as_assets_metadata(ctx, native_lib_assets, module)
        metadata_srcs[paths.join(_get_native_libs_as_assets_dir(module), "libs.txt")] = metadata_file
        compressed_lib_dir = _get_compressed_native_libs_as_assets(ctx, native_lib_assets, native_library_paths, module)
        compressed_lib_srcs[_get_native_libs_as_assets_dir(module)] = compressed_lib_dir

    # Merge prebuilt-dir libs and stripped linkables into single directories
    # (one for regular libs, with a metadata file, and one for libs pinned to
    # the primary APK).
    combined_native_libs = ctx.actions.declare_output("combined_native_libs", dir = True)
    native_libs_metadata = ctx.actions.declare_output("native_libs_metadata.txt")
    ctx.actions.run(cmd_args([
        ctx.attrs._android_toolchain[AndroidToolchainInfo].combine_native_library_dirs[RunInfo],
        "--output-dir",
        combined_native_libs.as_output(),
        "--library-dirs",
        native_libs,
        stripped_native_linkables,
        "--metadata-file",
        native_libs_metadata.as_output(),
    ]), category = "combine_native_libs")

    combined_native_libs_always_in_primary_apk = ctx.actions.declare_output("combined_native_libs_always_in_primary_apk", dir = True)
    ctx.actions.run(cmd_args([
        ctx.attrs._android_toolchain[AndroidToolchainInfo].combine_native_library_dirs[RunInfo],
        "--output-dir",
        combined_native_libs_always_in_primary_apk.as_output(),
        "--library-dirs",
        native_libs_always_in_primary_apk,
        stripped_native_linkables_always_in_primary_apk,
    ]), category = "combine_native_libs_always_in_primary_apk")

    return _NativeLibsAndAssetsInfo(
        native_libs = combined_native_libs,
        native_libs_metadata = native_libs_metadata,
        native_libs_always_in_primary_apk = combined_native_libs_always_in_primary_apk,
        native_lib_assets_for_primary_apk = native_lib_assets_for_primary_apk,
        stripped_native_linkable_assets_for_primary_apk = stripped_native_linkable_assets_for_primary_apk,
        metadata_assets = ctx.actions.symlinked_dir("metadata_assets", metadata_srcs),
        compressed_lib_assets = ctx.actions.symlinked_dir("compressed_lib_assets", compressed_lib_srcs),
    )
def _filter_prebuilt_native_library_dir(
        ctx: "context",
        native_libs: ["PrebuiltNativeLibraryDir"],
        identifier: str.type,
        package_as_assets: bool.type = False,
        module: str.type = ROOT_MODULE) -> "artifact":
    """
    Filters prebuilt native library dirs down to the ABIs selected by
    `cpu_filters` (all known ABIs when the attribute is unset) and returns the
    filtered output directory. When `package_as_assets` is set, the tool writes
    into the module's assets subdirectory inside the returned dir.
    """
    requested_cpus = ctx.attrs.cpu_filters or CPU_FILTER_TO_ABI_DIRECTORY.keys()
    abi_dirs = [CPU_FILTER_TO_ABI_DIRECTORY[cpu] for cpu in requested_cpus]

    input_dirs = [prebuilt.dir for prebuilt in native_libs]
    input_dirs_file = ctx.actions.write("{}_list.txt".format(identifier), input_dirs)

    base_output_dir = ctx.actions.declare_output(identifier, dir = True)
    if package_as_assets:
        output_dir = base_output_dir.project(_get_native_libs_as_assets_dir(module))
    else:
        output_dir = base_output_dir

    tool = ctx.attrs._android_toolchain[AndroidToolchainInfo].filter_prebuilt_native_library_dir[RunInfo]
    filter_cmd = cmd_args([tool, input_dirs_file, output_dir.as_output(), "--abis"] + abi_dirs)
    filter_cmd.hidden(input_dirs)
    ctx.actions.run(
        filter_cmd,
        category = "filter_prebuilt_native_library_dir",
        identifier = identifier,
    )
    return base_output_dir
def _get_native_linkables(
        ctx: "context",
        platform_to_native_linkables: {str.type: {str.type: "SharedLibrary"}},
        get_module_from_target: "function",
        package_native_libs_as_assets_enabled: bool.type) -> ("artifact", "artifact", ["artifact", None], {str.type: "artifact"}):
    """
    Partitions each platform's stripped shared libraries by packaging destination:
      1. regular native libs for the primary APK,
      2. libs explicitly marked `for_primary_apk`,
      3. root-module libs packaged as assets (only when
         `package_native_libs_as_assets_enabled` and the lib `can_be_asset`),
      4. per-module asset libs for non-root Voltron modules.
    Returns symlink dirs for (1) and (2), an optional symlink dir for (3), and a
    module-name -> symlink-dir map for (4).
    """
    stripped_native_linkables_srcs = {}
    stripped_native_linkables_always_in_primary_apk_srcs = {}
    stripped_native_linkable_assets_for_primary_apk_srcs = {}
    stripped_native_linkable_module_assets_srcs = {}

    cpu_filters = ctx.attrs.cpu_filters
    for platform, native_linkables in platform_to_native_linkables.items():
        # Every platform we were handed must have been requested via cpu_filters.
        if cpu_filters and platform not in cpu_filters:
            fail("Platform `{}` is not in the CPU filters `{}`".format(platform, cpu_filters))

        abi_directory = CPU_FILTER_TO_ABI_DIRECTORY[platform]
        for so_name, native_linkable in native_linkables.items():
            native_linkable_target = str(native_linkable.label.raw_target())
            module = get_module_from_target(native_linkable_target)

            if not is_root_module(module):
                # Non-root-module libs are always packaged as assets of their module;
                # that is incompatible with being forced into the primary APK.
                expect(not native_linkable.for_primary_apk, "{} which is marked as needing to be in the primary APK cannot be included in non-root-module {}".format(native_linkable_target, module))
                so_name_path = paths.join(_get_native_libs_as_assets_dir(module), abi_directory, so_name)
                stripped_native_linkable_module_assets_srcs.setdefault(module, {})[so_name_path] = native_linkable.stripped_lib
            elif native_linkable.can_be_asset and package_native_libs_as_assets_enabled:
                expect(not native_linkable.for_primary_apk, "{} which is marked as needing to be in the primary APK cannot be an asset".format(native_linkable_target))
                so_name_path = paths.join(_get_native_libs_as_assets_dir(module), abi_directory, so_name)
                stripped_native_linkable_assets_for_primary_apk_srcs[so_name_path] = native_linkable.stripped_lib
            else:
                # Regular libs live directly under their ABI directory.
                so_name_path = paths.join(abi_directory, so_name)
                if native_linkable.for_primary_apk:
                    stripped_native_linkables_always_in_primary_apk_srcs[so_name_path] = native_linkable.stripped_lib
                else:
                    stripped_native_linkables_srcs[so_name_path] = native_linkable.stripped_lib

    stripped_native_linkables = ctx.actions.symlinked_dir(
        "stripped_native_linkables",
        stripped_native_linkables_srcs,
    )
    stripped_native_linkables_always_in_primary_apk = ctx.actions.symlinked_dir(
        "stripped_native_linkables_always_in_primary_apk",
        stripped_native_linkables_always_in_primary_apk_srcs,
    )
    # Only materialize the asset dir when at least one lib landed there.
    stripped_native_linkable_assets_for_primary_apk = ctx.actions.symlinked_dir(
        "stripped_native_linkables_assets_for_primary_apk",
        stripped_native_linkable_assets_for_primary_apk_srcs,
    ) if stripped_native_linkable_assets_for_primary_apk_srcs else None

    # One symlink dir per non-root module.
    stripped_native_linkable_module_assets_map = {}
    for module, srcs in stripped_native_linkable_module_assets_srcs.items():
        stripped_native_linkable_module_assets_map[module] = ctx.actions.symlinked_dir(
            "stripped_native_linkable_assets_for_module_{}".format(module),
            srcs,
        )

    return (
        stripped_native_linkables,
        stripped_native_linkables_always_in_primary_apk,
        stripped_native_linkable_assets_for_primary_apk,
        stripped_native_linkable_module_assets_map,
    )
def _get_native_libs_as_assets_metadata(
        ctx: "context",
        native_lib_assets: ["artifact"],
        module: str.type) -> ("artifact", "artifact"):
    """
    Runs the toolchain's native-libs-as-assets metadata tool over the module's
    asset lib dirs. Returns (metadata file, list of native library paths).
    """
    assets_subdir = _get_native_libs_as_assets_dir(module)
    asset_dir_entries = []
    for native_lib_asset in native_lib_assets:
        # Each listed dir is "<asset dir artifact>/<assets subdir>".
        asset_dir_entries.append(cmd_args([native_lib_asset, assets_subdir], delimiter = "/"))
    native_lib_assets_file = ctx.actions.write("{}/native_lib_assets".format(module), asset_dir_entries)

    metadata_output = ctx.actions.declare_output("{}/native_libs_as_assets_metadata.txt".format(module))
    native_library_paths = ctx.actions.declare_output("{}/native_libs_as_assets_paths.txt".format(module))

    metadata_cmd = cmd_args(ctx.attrs._android_toolchain[AndroidToolchainInfo].native_libs_as_assets_metadata[RunInfo])
    metadata_cmd.add("--native-library-dirs", native_lib_assets_file)
    metadata_cmd.add("--metadata-output", metadata_output.as_output())
    metadata_cmd.add("--native-library-paths-output", native_library_paths.as_output())
    metadata_cmd.hidden(native_lib_assets)
    ctx.actions.run(metadata_cmd, category = "get_native_libs_as_assets_metadata", identifier = module)
    return metadata_output, native_library_paths
def _get_compressed_native_libs_as_assets(
        ctx: "context",
        native_lib_assets: ["artifact"],
        native_library_paths: "artifact",
        module: str.type) -> "artifact":
    """
    Compresses the module's asset native libraries (default algorithm "xz")
    into a fresh output directory and returns it.
    """
    output_dir = ctx.actions.declare_output("{}/compressed_native_libs_as_assets_dir".format(module))

    compress_cmd = cmd_args(ctx.attrs._android_toolchain[AndroidToolchainInfo].compress_libraries[RunInfo])
    compress_cmd.add("--libraries", native_library_paths)
    compress_cmd.add("--output-dir", output_dir.as_output())
    # Fall back to xz when no algorithm is configured on the target.
    compress_cmd.add("--compression-type", ctx.attrs.asset_compression_algorithm or "xz")
    compress_cmd.add("--xz-compression-level", str(ctx.attrs.xz_compression_level))
    compress_cmd.hidden(native_lib_assets)

    ctx.actions.run(compress_cmd, category = "compress_native_libs_as_assets", identifier = module)
    return output_dir
def _get_native_libs_as_assets_dir(module: str.type) -> str.type:
    # Root-module native-lib assets live under "assets/lib"; every other
    # module keeps its libs under "assets/<module name>".
    if is_root_module(module):
        return "assets/lib"
    return "assets/{}".format(module)

View File

@ -0,0 +1,590 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//:resources.bzl", "gather_resources")
load("@prelude//android:aapt2_link.bzl", "get_aapt2_link")
load("@prelude//android:android_manifest.bzl", "generate_android_manifest")
load("@prelude//android:android_providers.bzl", "AndroidBinaryResourcesInfo", "AndroidResourceInfo", "ExopackageResourcesInfo")
load("@prelude//android:android_resource.bzl", "aapt2_compile")
load("@prelude//android:android_toolchain.bzl", "AndroidToolchainInfo")
load("@prelude//android:r_dot_java.bzl", "generate_r_dot_javas")
load("@prelude//android:voltron.bzl", "ROOT_MODULE", "get_apk_module_graph_info", "is_root_module")
load("@prelude//utils:set.bzl", "set_type") # @unused Used as a type
load("@prelude//utils:utils.bzl", "expect")
load("@prelude//decls/android_rules.bzl", "RType")
def get_android_binary_resources_info(
        ctx: "context",
        deps: ["dependency"],
        android_packageable_info: "AndroidPackageableInfo",
        java_packaging_deps: ["JavaPackagingDep"],
        use_proto_format: bool.type,
        referenced_resources_lists: ["artifact"],
        apk_module_graph_file: ["artifact", None] = None,
        manifest_entries: dict.type = {},
        resource_infos_to_exclude: [set_type, None] = None,
        generate_strings_and_ids_separately: [bool.type, None] = True,
        aapt2_min_sdk: [str.type, None] = None,
        aapt2_preferred_density: [str.type, None] = None) -> "AndroidBinaryResourcesInfo":
    """
    Builds everything resource-related for an Android binary: filters the
    transitive resources, merges manifests, links with aapt2, merges assets,
    optionally rewrites resources for exopackage, and generates R.java sources.
    """
    android_toolchain = ctx.attrs._android_toolchain[AndroidToolchainInfo]

    # Transitive resource infos, minus anything explicitly excluded by target.
    unfiltered_resource_infos = [
        resource_info
        for resource_info in list(android_packageable_info.resource_infos.traverse() if android_packageable_info.resource_infos else [])
        if not (resource_infos_to_exclude and resource_infos_to_exclude.contains(resource_info.raw_target))
    ]
    resource_infos, override_symbols, string_files_list, string_files_res_dirs = _maybe_filter_resources(
        ctx,
        unfiltered_resource_infos,
        android_toolchain,
    )

    android_manifest = _get_manifest(ctx, android_packageable_info, manifest_entries)
    module_manifests = _get_module_manifests(ctx, android_packageable_info, manifest_entries, apk_module_graph_file)

    aapt2_link_info = get_aapt2_link(
        ctx,
        ctx.attrs._android_toolchain[AndroidToolchainInfo],
        [resource_info.aapt2_compile_output for resource_info in resource_infos if resource_info.aapt2_compile_output != None],
        android_manifest,
        includes_vector_drawables = getattr(ctx.attrs, "includes_vector_drawables", False),
        no_auto_version = getattr(ctx.attrs, "no_auto_version_resources", False),
        no_version_transitions = getattr(ctx.attrs, "no_version_transitions_resources", False),
        no_auto_add_overlay = getattr(ctx.attrs, "no_auto_add_overlay_resources", False),
        use_proto_format = use_proto_format,
        no_resource_removal = True,
        package_id_offset = 0,
        should_keep_raw_values = getattr(ctx.attrs, "aapt2_keep_raw_values", False),
        resource_stable_ids = getattr(ctx.attrs, "resource_stable_ids", None),
        compiled_resource_apks = [],
        additional_aapt2_params = getattr(ctx.attrs, "additional_aapt_params", []),
        extra_filtered_resources = getattr(ctx.attrs, "extra_filtered_resources", []),
        locales = getattr(ctx.attrs, "locales", []) or getattr(ctx.attrs, "locales_for_binary_resources", []),
        filter_locales = getattr(ctx.attrs, "aapt2_locale_filtering", False) or bool(getattr(ctx.attrs, "locales_for_binary_resources", [])),
        min_sdk = aapt2_min_sdk,
        preferred_density = aapt2_preferred_density,
    )

    prebuilt_jars = [packaging_dep.jar for packaging_dep in java_packaging_deps if packaging_dep.is_prebuilt_jar]

    cxx_resources = _get_cxx_resources(ctx, deps)
    is_exopackaged_enabled_for_resources = "resources" in getattr(ctx.attrs, "exopackage_modes", [])
    primary_resources_apk, exopackaged_assets, exopackaged_assets_hash = _merge_assets(
        ctx,
        is_exopackaged_enabled_for_resources,
        aapt2_link_info.primary_resources_apk,
        resource_infos,
        cxx_resources,
    )

    if is_exopackaged_enabled_for_resources:
        # Split the linked resources into a rewritten primary APK plus an
        # "exo resources" APK (and its hash) that is delivered separately.
        r_dot_txt = ctx.actions.declare_output("after_exo/R.txt")
        primary_resources_apk = ctx.actions.declare_output("after_exo/primary_resources_apk.apk")
        exo_resources = ctx.actions.declare_output("exo_resources.apk")
        exo_resources_hash = ctx.actions.declare_output("exo_resources.apk.hash")
        ctx.actions.run(cmd_args([
            android_toolchain.exo_resources_rewriter[RunInfo],
            "--original-r-dot-txt",
            aapt2_link_info.r_dot_txt,
            "--new-r-dot-txt",
            r_dot_txt.as_output(),
            "--original-primary-apk-resources",
            aapt2_link_info.primary_resources_apk,
            "--new-primary-apk-resources",
            primary_resources_apk.as_output(),
            "--exo-resources",
            exo_resources.as_output(),
            "--exo-resources-hash",
            exo_resources_hash.as_output(),
            "--zipalign-tool",
            android_toolchain.zipalign[RunInfo],
        ]), category = "write_exo_resources")

        # Resources inside prebuilt jars are merged into their own exo artifact.
        third_party_jars = ctx.actions.write("third_party_jars", prebuilt_jars)
        third_party_jar_resources = ctx.actions.declare_output("third_party_jars.resources")
        third_party_jar_resources_hash = ctx.actions.declare_output("third_party_jars.resources.hash")
        ctx.actions.run(cmd_args([
            android_toolchain.merge_third_party_jar_resources[RunInfo],
            "--output",
            third_party_jar_resources.as_output(),
            "--output-hash",
            third_party_jar_resources_hash.as_output(),
            "--third-party-jars",
            third_party_jars,
        ]).hidden(prebuilt_jars), category = "merge_third_party_jar_resources")

        exopackage_info = ExopackageResourcesInfo(
            assets = exopackaged_assets,
            assets_hash = exopackaged_assets_hash,
            res = exo_resources,
            res_hash = exo_resources_hash,
            third_party_jar_resources = third_party_jar_resources,
            third_party_jar_resources_hash = third_party_jar_resources_hash,
        )
        jar_files_that_may_contain_resources = []
    else:
        exopackage_info = None
        # Without exopackage, prebuilt jars keep their resources in the APK.
        jar_files_that_may_contain_resources = prebuilt_jars
        r_dot_txt = aapt2_link_info.r_dot_txt

    override_symbols_paths = [override_symbols] if override_symbols else []
    resources = [resource for resource in resource_infos if resource.res != None]
    r_dot_javas = [] if len(resources) == 0 else generate_r_dot_javas(
        ctx,
        ctx.attrs._android_toolchain[AndroidToolchainInfo].merge_android_resources[RunInfo],
        resources,
        get_effective_banned_duplicate_resource_types(
            getattr(ctx.attrs, "duplicate_resource_behavior", "allow_by_default"),
            getattr(ctx.attrs, "allowed_duplicate_resource_types", []),
            getattr(ctx.attrs, "banned_duplicate_resource_types", []),
        ),
        [r_dot_txt],
        override_symbols_paths,
        getattr(ctx.attrs, "duplicate_resource_whitelist", None),
        getattr(ctx.attrs, "resource_union_package", None),
        referenced_resources_lists,
        generate_strings_and_ids_separately = generate_strings_and_ids_separately,
    )
    string_source_map = _maybe_generate_string_source_map(
        ctx.actions,
        getattr(ctx.attrs, "build_string_source_map", False),
        resources,
        android_toolchain,
    )
    voltron_string_source_map = _maybe_generate_string_source_map(
        ctx.actions,
        getattr(ctx.attrs, "is_voltron_language_pack_enabled", False),
        resources,
        android_toolchain,
        is_voltron_string_source_map = True,
    )

    packaged_string_assets = _maybe_package_strings_as_assets(
        ctx,
        string_files_list,
        string_files_res_dirs,
        r_dot_txt,
        android_toolchain,
    )

    return AndroidBinaryResourcesInfo(
        exopackage_info = exopackage_info,
        manifest = android_manifest,
        module_manifests = module_manifests,
        packaged_string_assets = packaged_string_assets,
        primary_resources_apk = primary_resources_apk,
        proguard_config_file = aapt2_link_info.proguard_config_file,
        r_dot_javas = r_dot_javas,
        string_source_map = string_source_map,
        voltron_string_source_map = voltron_string_source_map,
        jar_files_that_may_contain_resources = jar_files_that_may_contain_resources,
        unfiltered_resource_infos = unfiltered_resource_infos,
    )
def _maybe_filter_resources(
        ctx: "context",
        resources: [AndroidResourceInfo.type],
        android_toolchain: AndroidToolchainInfo.type) -> ([AndroidResourceInfo.type], ["artifact", None], ["artifact", None], ["artifact"]):
    """
    Runs the resource-filtering tool when any of the filtering features is
    enabled (density filter, strings-as-assets, locale filtering, or a
    post-filter command). Returns:
      - the (possibly filtered and re-compiled) resource infos,
      - an optional override-symbols R.json from the post-filter command,
      - an optional list of string files (strings-as-assets mode),
      - the res dirs those string files come from.
    When no filtering is needed, the inputs are returned unchanged.
    """
    resources_filter_strings = getattr(ctx.attrs, "resource_filter", [])
    resources_filter = _get_resources_filter(resources_filter_strings)

    resource_compression_mode = getattr(ctx.attrs, "resource_compression", "disabled")
    is_store_strings_as_assets = _is_store_strings_as_assets(resource_compression_mode)

    # aapt2-based locale filtering supersedes filtering here.
    locales = getattr(ctx.attrs, "locales", None)
    use_aapt2_locale_filtering = getattr(ctx.attrs, "aapt2_locale_filtering", False)
    needs_resource_filtering_for_locales = locales != None and len(locales) > 0 and not use_aapt2_locale_filtering
    post_filter_resources_cmd = getattr(ctx.attrs, "post_filter_resources_cmd", None)

    needs_resource_filtering = (
        resources_filter != None or
        is_store_strings_as_assets or
        needs_resource_filtering_for_locales or
        post_filter_resources_cmd != None
    )

    if not needs_resource_filtering:
        return resources, None, None, []

    # Declare one filtered output dir per resource that actually has a res dir
    # (and a second one per resource for Voltron language packs).
    res_info_to_out_res_dir = {}
    voltron_res_info_to_out_res_dir = {}
    res_infos_with_no_res = []
    skip_crunch_pngs = getattr(ctx.attrs, "skip_crunch_pngs", None) or False
    is_voltron_language_pack_enabled = getattr(ctx.attrs, "is_voltron_language_pack_enabled", False)
    for i, resource in enumerate(resources):
        if resource.res == None:
            res_infos_with_no_res.append(resource)
        else:
            filtered_res = ctx.actions.declare_output("filtered_res_{}".format(i), dir = True)
            res_info_to_out_res_dir[resource] = filtered_res

            if is_voltron_language_pack_enabled:
                filtered_res_for_voltron = ctx.actions.declare_output("filtered_res_for_voltron_{}".format(i), dir = True)
                voltron_res_info_to_out_res_dir[resource] = filtered_res_for_voltron

    filter_resources_cmd = cmd_args(android_toolchain.filter_resources[RunInfo])
    in_res_dir_to_out_res_dir_dict = {
        in_res.res: out_res
        for in_res, out_res in res_info_to_out_res_dir.items()
    }
    in_res_dir_to_out_res_dir_map = ctx.actions.write_json("in_res_dir_to_out_res_dir_map", {"res_dir_map": in_res_dir_to_out_res_dir_dict})
    in_res_dirs = [in_res.res for in_res in res_info_to_out_res_dir.keys()]
    filter_resources_cmd.hidden(in_res_dirs)
    filter_resources_cmd.hidden([out_res.as_output() for out_res in res_info_to_out_res_dir.values()])
    filter_resources_cmd.add([
        "--in-res-dir-to-out-res-dir-map",
        in_res_dir_to_out_res_dir_map,
    ])

    if is_voltron_language_pack_enabled:
        voltron_in_res_dir_to_out_res_dir_dict = {
            in_res.res: out_res
            for in_res, out_res in voltron_res_info_to_out_res_dir.items()
        }
        voltron_in_res_dir_to_out_res_dir_map = ctx.actions.write_json("voltron_in_res_dir_to_out_res_dir_map", {"res_dir_map": voltron_in_res_dir_to_out_res_dir_dict})
        filter_resources_cmd.hidden([out_res.as_output() for out_res in voltron_res_info_to_out_res_dir.values()])
        filter_resources_cmd.add([
            "--voltron-in-res-dir-to-out-res-dir-map",
            voltron_in_res_dir_to_out_res_dir_map,
        ])

    if resources_filter:
        filter_resources_cmd.add([
            "--target-densities",
            ",".join(resources_filter.densities),
        ])

    all_strings_files_list = None
    all_strings_files_res_dirs = []
    if is_store_strings_as_assets:
        all_strings_files_list = ctx.actions.declare_output("all_strings_files")
        all_strings_files_res_dirs = in_res_dirs
        filter_resources_cmd.add([
            "--enable-string-as-assets-filtering",
            "--string-files-list-output",
            all_strings_files_list.as_output(),
        ])

        packaged_locales = getattr(ctx.attrs, "packaged_locales", [])
        if packaged_locales:
            filter_resources_cmd.add([
                "--packaged-locales",
                ",".join(packaged_locales),
            ])

        # Some resources opt out of strings-as-assets filtering entirely.
        not_filtered_string_dirs = [resource.res for resource in resources if not resource.allow_strings_as_assets_resource_filtering]
        if not_filtered_string_dirs:
            filter_resources_cmd.add([
                "--not-filtered-string-dirs",
                ctx.actions.write("not_filtered_string_dirs", not_filtered_string_dirs),
            ])

    if needs_resource_filtering_for_locales:
        filter_resources_cmd.add([
            "--locales",
            ",".join(locales),
        ])

    override_symbols_artifact = None
    if post_filter_resources_cmd != None:
        override_symbols_artifact = ctx.actions.declare_output("post_filter_resources_cmd/R.json")
        filter_resources_cmd.add([
            "--post-filter-resources-cmd",
            post_filter_resources_cmd,
            "--post-filter-resources-cmd-override-symbols-output",
            override_symbols_artifact.as_output(),
        ])

    ctx.actions.run(filter_resources_cmd, category = "filter_resources")

    # Re-run aapt2 compile on each filtered res dir and rebuild the infos.
    filtered_resource_infos = []
    for i, resource in enumerate(resources):
        if resource.res == None:
            continue

        filtered_res = res_info_to_out_res_dir[resource]
        filtered_aapt2_compile_output = aapt2_compile(
            ctx,
            filtered_res,
            android_toolchain,
            skip_crunch_pngs = skip_crunch_pngs,
            identifier = "filtered_res_{}".format(i),
        )
        filtered_resource = AndroidResourceInfo(
            raw_target = resource.raw_target,
            aapt2_compile_output = filtered_aapt2_compile_output,
            assets = resource.assets,
            manifest_file = resource.manifest_file,
            r_dot_java_package = resource.r_dot_java_package,
            res = filtered_res,
            text_symbols = resource.text_symbols,
        )
        filtered_resource_infos.append(filtered_resource)

    return (
        res_infos_with_no_res + filtered_resource_infos,
        override_symbols_artifact,
        all_strings_files_list,
        all_strings_files_res_dirs,
    )
# Parsed form of the `resource_filter` attribute: the target screen densities
# to keep, and whether to also downscale larger drawables to those densities.
ResourcesFilter = record(
    densities = [str.type],
    downscale = bool.type,
)
def _get_resources_filter(resources_filter_strings: [str.type]) -> [ResourcesFilter.type, None]:
    """
    Parses the raw `resource_filter` strings. The list mixes density names with
    an optional "downscale" flag; returns None when there is nothing to filter.
    """
    if not resources_filter_strings:
        return None

    densities = []
    for filter_string in resources_filter_strings:
        if filter_string != "downscale":
            densities.append(filter_string)
    if not densities:
        return None

    # A length difference means at least one "downscale" entry was present.
    should_downscale = len(densities) < len(resources_filter_strings)
    return ResourcesFilter(densities = densities, downscale = should_downscale)
def _maybe_generate_string_source_map(
        actions: "actions",
        should_build_source_string_map: bool.type,
        resource_infos: [AndroidResourceInfo.type],
        android_toolchain: AndroidToolchainInfo.type,
        is_voltron_string_source_map: bool.type = False) -> ["artifact", None]:
    """
    Optionally copies string resources into a "string source map" directory.
    Returns None when disabled or when there are no resources.
    """
    if not should_build_source_string_map:
        return None
    if len(resource_infos) == 0:
        return None

    if is_voltron_string_source_map:
        prefix = "voltron_"
    else:
        prefix = ""

    res_dirs = []
    for resource_info in resource_infos:
        res_dirs.append(resource_info.res)

    output = actions.declare_output("{}string_source_map".format(prefix), dir = True)
    res_dirs_file = actions.write("resource_dirs_for_{}string_source_map".format(prefix), res_dirs)

    copy_cmd = cmd_args(android_toolchain.copy_string_resources[RunInfo])
    copy_cmd.add("--res-dirs", res_dirs_file)
    copy_cmd.add("--output", output.as_output())
    copy_cmd.hidden(res_dirs)
    if is_voltron_string_source_map:
        copy_cmd.add("--is-voltron")

    actions.run(copy_cmd, category = "generate_{}string_source_map".format(prefix))
    return output
def _maybe_package_strings_as_assets(
        ctx: "context",
        string_files_list: ["artifact", None],
        string_files_res_dirs: ["artifact"],
        r_dot_txt: "artifact",
        android_toolchain: AndroidToolchainInfo.type) -> ["artifact", None]:
    """
    When the resource-compression mode stores strings as assets, packages the
    filtered string files into a zip and returns it; otherwise returns None.
    """
    resource_compression_mode = getattr(ctx.attrs, "resource_compression", "disabled")
    store_strings_as_assets = _is_store_strings_as_assets(resource_compression_mode)
    # The filter step produces a string-files list exactly when this mode is on.
    expect(store_strings_as_assets == (string_files_list != None))
    if not store_strings_as_assets:
        return None

    string_assets_dir = ctx.actions.declare_output("package_strings_as_assets/string_assets", dir = True)
    string_assets_zip = ctx.actions.declare_output("package_strings_as_assets/string_assets_zip.zip")
    all_locales_string_assets_zip = ctx.actions.declare_output("package_strings_as_assets/all_locales_string_assets_zip.zip")

    package_cmd = cmd_args(android_toolchain.package_strings_as_assets[RunInfo])
    package_cmd.add("--string-files-list", string_files_list)
    package_cmd.add("--r-dot-txt", r_dot_txt)
    package_cmd.add("--string-assets-dir", string_assets_dir.as_output())
    package_cmd.add("--string-assets-zip", string_assets_zip.as_output())
    package_cmd.add("--all-locales-string-assets-zip", all_locales_string_assets_zip.as_output())
    package_cmd.hidden(string_files_res_dirs)

    locales = getattr(ctx.attrs, "locales", [])
    if locales:
        package_cmd.add("--locales", ",".join(locales))

    ctx.actions.run(package_cmd, category = "package_strings_as_assets")
    return string_assets_zip
def _get_manifest(
        ctx: "context",
        android_packageable_info: "AndroidPackageableInfo",
        manifest_entries: dict.type) -> "artifact":
    """
    Resolves the binary's AndroidManifest.xml: an explicit `robolectric_manifest`
    wins; otherwise either the `manifest` attribute is used directly, or one is
    generated from `manifest_skeleton` plus the transitive library manifests.
    Optionally rewrites the applicationId afterwards (toolchain-controlled).
    """
    robolectric_manifest = getattr(ctx.attrs, "robolectric_manifest", None)
    if robolectric_manifest:
        return robolectric_manifest

    android_toolchain = ctx.attrs._android_toolchain[AndroidToolchainInfo]
    if ctx.attrs.manifest:
        # `manifest` and `manifest_skeleton` are mutually exclusive.
        expect(ctx.attrs.manifest_skeleton == None, "Only one of manifest and manifest_skeleton should be declared")
        if type(ctx.attrs.manifest) == "dependency":
            android_manifest = ctx.attrs.manifest[DefaultInfo].default_outputs[0]
        else:
            android_manifest = ctx.attrs.manifest
    else:
        expect(ctx.attrs.manifest_skeleton != None, "Must declare one of manifest and manifest_skeleton")
        if type(ctx.attrs.manifest_skeleton) == "dependency":
            manifest_skeleton = ctx.attrs.manifest_skeleton[DefaultInfo].default_outputs[0]
        else:
            manifest_skeleton = ctx.attrs.manifest_skeleton

        # Merge the skeleton with all transitive manifests for the root module.
        android_manifest, _ = generate_android_manifest(
            ctx,
            android_toolchain.generate_manifest[RunInfo],
            manifest_skeleton,
            ROOT_MODULE,
            android_packageable_info.manifests,
            manifest_entries.get("placeholders", {}),
        )

    if android_toolchain.set_application_id_to_specified_package:
        android_manifest_with_replaced_application_id = ctx.actions.declare_output("android_manifest_with_replaced_application_id")
        replace_application_id_placeholders_cmd = cmd_args([
            ctx.attrs._android_toolchain[AndroidToolchainInfo].replace_application_id_placeholders[RunInfo],
            "--manifest",
            android_manifest,
            "--output",
            android_manifest_with_replaced_application_id.as_output(),
        ])
        if android_toolchain.should_run_sanity_check_for_placeholders:
            replace_application_id_placeholders_cmd.add("--sanity-check-placeholders")

        ctx.actions.run(replace_application_id_placeholders_cmd, category = "replace_application_id_placeholders")
        return android_manifest_with_replaced_application_id
    else:
        return android_manifest
def _get_module_manifests(
        ctx: "context",
        android_packageable_info: "AndroidPackageableInfo",
        manifest_entries: dict.type,
        apk_module_graph_file: ["artifact", None]) -> ["artifact"]:
    """
    Generates per-module AndroidManifest.xml files for Voltron modules, laid out
    as "assets/<module>/AndroidManifest.xml" inside a single output directory.
    Returns [] when there is no module graph or no module manifest skeleton.
    The module graph is only known at build time, so the work happens inside a
    dynamic_output.
    """
    if not apk_module_graph_file:
        return []

    if not ctx.attrs.module_manifest_skeleton:
        return []

    if type(ctx.attrs.module_manifest_skeleton) == "dependency":
        module_manifest_skeleton = ctx.attrs.module_manifest_skeleton[DefaultInfo].default_outputs[0]
    else:
        module_manifest_skeleton = ctx.attrs.module_manifest_skeleton

    android_toolchain = ctx.attrs._android_toolchain[AndroidToolchainInfo]
    module_manifests_dir = ctx.actions.declare_output("module_manifests_dir", dir = True)
    android_manifests = list(android_packageable_info.manifests.traverse()) if android_packageable_info.manifests else []

    def get_manifests_modular(ctx: "context", artifacts, outputs):
        # Runs once the module graph artifact is available.
        apk_module_graph_info = get_apk_module_graph_info(ctx, apk_module_graph_file, artifacts)
        get_module_from_target = apk_module_graph_info.target_to_module_mapping_function

        # Group library manifests by the (non-root) module that owns them.
        module_to_manifests = {}
        for android_manifest in android_manifests:
            module_name = get_module_from_target(str(android_manifest.target_label))
            if not is_root_module(module_name):
                module_to_manifests.setdefault(module_name, []).append(android_manifest.manifest)

        merged_module_manifests = {}
        for module_name in apk_module_graph_info.module_list:
            merged_module_manifest, _ = generate_android_manifest(
                ctx,
                android_toolchain.generate_manifest[RunInfo],
                module_manifest_skeleton,
                module_name,
                module_to_manifests.get(module_name, []),
                manifest_entries.get("placeholders", {}),
            )
            merged_module_manifests["assets/{}/AndroidManifest.xml".format(module_name)] = merged_module_manifest

        ctx.actions.symlinked_dir(outputs[module_manifests_dir], merged_module_manifests)

    ctx.actions.dynamic_output(
        dynamic = [apk_module_graph_file],
        inputs = [],
        outputs = [module_manifests_dir],
        f = get_manifests_modular,
    )
    return [module_manifests_dir]
# Returns the "primary resources APK" (i.e. the resource that are packaged into the primary APK),
# and optionally an "exopackaged assets APK" and the hash for that APK.
def _merge_assets(
        ctx: "context",
        is_exopackaged_enabled_for_resources: bool.type,
        base_apk: "artifact",
        resource_infos: ["AndroidResourceInfo"],
        cxx_resources: ["artifact", None]) -> ("artifact", ["artifact", None], ["artifact", None]):
    asset_dirs = [resource_info.assets for resource_info in resource_infos if resource_info.assets]
    if cxx_resources != None:
        asset_dirs.append(cxx_resources)
    if not asset_dirs:
        # Nothing to merge: the base APK already is the primary resources APK.
        return base_apk, None, None

    merged_output = ctx.actions.declare_output("merged_assets.ap_")
    merge_cmd = cmd_args(ctx.attrs._android_toolchain[AndroidToolchainInfo].merge_assets[RunInfo])
    merge_cmd.add(["--output-apk", merged_output.as_output()])

    if is_exopackaged_enabled_for_resources:
        # Assets ship separately, so also emit a hash for the exo assets APK.
        merged_output_hash = ctx.actions.declare_output("merged_assets.ap_.hash")
        merge_cmd.add(["--output-apk-hash", merged_output_hash.as_output()])
    else:
        # Assets are merged on top of the base APK.
        merge_cmd.add(["--base-apk", base_apk])
        merged_output_hash = None

    asset_dirs_file = ctx.actions.write("assets_dirs", asset_dirs)
    merge_cmd.add(["--assets-dirs", asset_dirs_file])
    merge_cmd.hidden(asset_dirs)
    ctx.actions.run(merge_cmd, category = "merge_assets")

    if is_exopackaged_enabled_for_resources:
        return base_apk, merged_output, merged_output_hash
    return merged_output, None, None
def get_effective_banned_duplicate_resource_types(
        duplicate_resource_behavior: str.type,
        allowed_duplicate_resource_types: [str.type],
        banned_duplicate_resource_types: [str.type]) -> [str.type]:
    """
    Computes the list of resource types for which duplicates are banned.
    "allow_by_default" bans only the explicitly listed types; "ban_by_default"
    bans every RType except the explicitly allowed ones. Each mode rejects the
    other mode's list being set.
    """
    if duplicate_resource_behavior == "allow_by_default":
        expect(
            len(allowed_duplicate_resource_types) == 0,
            "Cannot set allowed_duplicate_resource_types if duplicate_resource_behaviour is allow_by_default",
        )
        return banned_duplicate_resource_types

    if duplicate_resource_behavior == "ban_by_default":
        expect(
            len(banned_duplicate_resource_types) == 0,
            "Cannot set banned_duplicate_resource_types if duplicate_resource_behaviour is ban_by_default",
        )
        banned = []
        for rtype in RType:
            if rtype not in allowed_duplicate_resource_types:
                banned.append(rtype)
        return banned

    fail("Unrecognized duplicate_resource_behavior: {}".format(duplicate_resource_behavior))
def _get_cxx_resources(ctx: "context", deps: ["dependency"]) -> ["artifact", None]:
    """
    Gathers C++ resources from `deps` and lays them out in a symlink dir under
    "cxx-resources/<name>". Returns None when there are no such resources.
    """
    gathered = gather_resources(
        label = ctx.label,
        resources = {},
        deps = deps,
    )

    symlink_tree_dict = {}
    for resource_map in gathered.values():
        for name, (resource, _other) in resource_map.items():
            symlink_tree_dict["cxx-resources/{}".format(name)] = resource

    if not symlink_tree_dict:
        return None
    return ctx.actions.symlinked_dir("cxx_resources_dir", symlink_tree_dict)
def _is_store_strings_as_assets(resource_compression: str.type) -> bool.type:
    # Both "strings" compression modes move string resources out of the
    # resource table and into assets.
    return resource_compression in ("enabled_strings_only", "enabled_with_strings_as_assets")

View File

@ -0,0 +1,113 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//android:android_toolchain.bzl", "AndroidToolchainInfo")
load("@prelude//java:java_library.bzl", "compile_to_jar")
load("@prelude//java:java_providers.bzl", "JavaLibraryInfo", "JavaPackagingDepTSet", "JavaPackagingInfo", "create_java_packaging_dep", "derive_compiling_deps")
load(":android_providers.bzl", "AndroidBuildConfigInfo", "BuildConfigField", "merge_android_packageable_info")
def android_build_config_impl(ctx: "context") -> ["provider"]:
    """
    Implementation of `android_build_config`: parses the declared values,
    generates and compiles BuildConfig.java, and returns the build-config,
    packageable, Java library/packaging, and default providers.
    """
    build_config_fields = get_build_config_fields(ctx.attrs.values)
    build_config_info = AndroidBuildConfigInfo(
        package = ctx.attrs.package,
        build_config_fields = build_config_fields,
    )
    packageable_info = merge_android_packageable_info(
        ctx.label,
        ctx.actions,
        deps = [],
        build_config_info = build_config_info,
    )

    java_library_info, java_packaging_info = generate_android_build_config(
        ctx,
        ctx.attrs.name,
        ctx.attrs.package,
        False,  # use_constant_expressions
        build_config_fields,
        ctx.attrs.values_file,
    )

    return [
        build_config_info,
        packageable_info,
        java_packaging_info,
        java_library_info,
        DefaultInfo(default_output = java_library_info.library_output.full_library),
    ]
def generate_android_build_config(
        ctx: "context",
        source: str.type,
        java_package: str.type,
        use_constant_expressions: bool.type,
        default_values: ["BuildConfigField"],
        values_file: ["artifact", None]) -> ("JavaLibraryInfo", "JavaPackagingInfo"):
    """
    Generates a BuildConfig.java for `java_package`, compiles it to a jar, and
    returns the resulting (JavaLibraryInfo, JavaPackagingInfo) pair.
    """
    build_config_dot_java = _generate_build_config_dot_java(
        ctx,
        source,
        java_package,
        use_constant_expressions,
        default_values,
        values_file,
    )
    compile_outputs = _compile_and_package_build_config_dot_java(ctx, java_package, build_config_dot_java)
    library_output = compile_outputs.classpath_entry

    packaging_deps = ctx.actions.tset(
        JavaPackagingDepTSet,
        value = create_java_packaging_dep(ctx, library_output.full_library),
    )
    library_info = JavaLibraryInfo(
        compiling_deps = derive_compiling_deps(ctx.actions, library_output, []),
        library_output = library_output,
        output_for_classpath_macro = library_output.full_library,
    )
    packaging_info = JavaPackagingInfo(
        packaging_deps = packaging_deps,
    )
    return (library_info, packaging_info)
def _generate_build_config_dot_java(
        ctx: "context",
        source: str.type,
        java_package: str.type,
        use_constant_expressions: bool.type,
        default_values: ["BuildConfigField"],
        values_file: ["artifact", None]) -> "artifact":
    """
    Invokes the toolchain's generate_build_config tool and returns the
    generated BuildConfig.java artifact.
    """
    # Serialize the default fields as "<type> <name> = <value>" lines.
    default_values_file = ctx.actions.write(
        _get_output_name(java_package, "default_values"),
        ["{} {} = {}".format(x.type, x.name, x.value) for x in default_values],
    )
    build_config_dot_java = ctx.actions.declare_output(_get_output_name(java_package, "BuildConfig.java"))

    generate_cmd = cmd_args(ctx.attrs._android_toolchain[AndroidToolchainInfo].generate_build_config[RunInfo])
    generate_cmd.add(["--source", source])
    generate_cmd.add(["--java-package", java_package])
    generate_cmd.add(["--use-constant-expressions", str(use_constant_expressions)])
    generate_cmd.add(["--default-values-file", default_values_file])
    if values_file:
        generate_cmd.add(["--values-file", values_file])
    generate_cmd.add(["--output", build_config_dot_java.as_output()])

    ctx.actions.run(
        generate_cmd,
        category = "android_generate_build_config",
        identifier = java_package,
    )
    return build_config_dot_java
def _compile_and_package_build_config_dot_java(
        ctx: "context",
        java_package: str.type,
        build_config_dot_java: "artifact") -> "JavaCompileOutputs":
    """Compiles the generated BuildConfig.java into a jar."""
    identifier = "build_config_{}".format(java_package.replace(".", "_"))
    return compile_to_jar(
        ctx,
        actions_identifier = identifier,
        srcs = [build_config_dot_java],
    )
def get_build_config_fields(lines: [str.type]) -> ["BuildConfigField"]:
    """Parses each "<type> <name> = <value>" line into a BuildConfigField."""
    fields = []
    for line in lines:
        fields.append(_get_build_config_field(line))
    return fields
def _get_build_config_field(line: str.type) -> "BuildConfigField":
    """Parses a single "<type> <name> = <value>" line into a BuildConfigField.

    Splits on the first "=" only, so that values which themselves contain
    "=" (e.g. `String FLAG = "a=b"`) are kept intact; the original
    two-way unpack of an unbounded split would fail on such lines.
    """
    type_and_name, value = [x.strip() for x in line.split("=", 1)]
    field_type, name = type_and_name.split()
    return BuildConfigField(type = field_type, name = name, value = value)
def _get_output_name(java_package: str.type, output_filename: str.type) -> str.type:
    """Returns an output path namespaced by the (dot-sanitized) Java package."""
    sanitized_package = java_package.replace(".", "_")
    return "android_build_config/{}/{}".format(sanitized_package, output_filename)

View File

@ -0,0 +1,93 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//android:android_apk.bzl", "build_apk")
load("@prelude//android:android_binary_native_library_rules.bzl", "get_android_binary_native_library_info")
load("@prelude//android:android_binary_resources_rules.bzl", "get_android_binary_resources_info")
load("@prelude//android:android_providers.bzl", "AndroidApkInfo", "AndroidApkUnderTestInfo", "AndroidInstrumentationApkInfo", "merge_android_packageable_info")
load("@prelude//android:android_toolchain.bzl", "AndroidToolchainInfo")
load("@prelude//android:configuration.bzl", "get_deps_by_platform")
load("@prelude//android:dex_rules.bzl", "merge_to_single_dex")
load("@prelude//java:java_providers.bzl", "create_java_packaging_dep", "get_all_java_packaging_deps")
load("@prelude//utils:utils.bzl", "expect")
def android_instrumentation_apk_impl(ctx: "context"):
    # Implementation of `android_instrumentation_apk`: builds a test APK that
    # instruments an APK-under-test, excluding classes, resources, and native
    # libraries the APK-under-test already packages.

    # To begin with, let's just implement something that has a single DEX file and a manifest.
    _verify_params(ctx)

    apk_under_test_info = ctx.attrs.apk[AndroidApkUnderTestInfo]

    # android_instrumentation_apk should just use the same platforms and primary_platform as the APK-under-test
    unfiltered_deps_by_platform = get_deps_by_platform(ctx)
    for platform in apk_under_test_info.platforms:
        expect(
            platform in unfiltered_deps_by_platform,
            "Android instrumentation APK must have any platforms that are in the APK-under-test!",
        )
    deps_by_platform = {platform: deps for platform, deps in unfiltered_deps_by_platform.items() if platform in apk_under_test_info.platforms}
    primary_platform = apk_under_test_info.primary_platform
    deps = deps_by_platform[primary_platform]

    # Only package dexes that the APK-under-test does not already contain.
    java_packaging_deps = [
        packaging_dep
        for packaging_dep in get_all_java_packaging_deps(ctx, deps)
        if packaging_dep.dex and not apk_under_test_info.java_packaging_deps.contains(packaging_dep.label.raw_target())
    ]

    android_packageable_info = merge_android_packageable_info(ctx.label, ctx.actions, deps)
    # Resources already present in the APK-under-test are excluded here too.
    resources_info = get_android_binary_resources_info(
        ctx,
        deps,
        android_packageable_info,
        java_packaging_deps = java_packaging_deps,
        use_proto_format = False,
        referenced_resources_lists = [],
        manifest_entries = apk_under_test_info.manifest_entries,
        resource_infos_to_exclude = apk_under_test_info.resource_infos,
    )

    android_toolchain = ctx.attrs._android_toolchain[AndroidToolchainInfo]
    # R.java jars produced from the linked resources must also be dexed.
    java_packaging_deps += [
        create_java_packaging_dep(
            ctx,
            r_dot_java.library_output.full_library,
            dex_weight_factor = android_toolchain.r_dot_java_weight_factor,
        )
        for r_dot_java in resources_info.r_dot_javas
    ]

    # For instrumentation test APKs we always pre-dex, and we also always merge to a single dex.
    pre_dexed_libs = [java_packaging_dep.dex for java_packaging_dep in java_packaging_deps]
    dex_files_info = merge_to_single_dex(ctx, android_toolchain, pre_dexed_libs)

    native_library_info = get_android_binary_native_library_info(
        ctx,
        android_packageable_info,
        deps_by_platform,
        prebuilt_native_library_dirs_to_exclude = apk_under_test_info.prebuilt_native_library_dirs,
        shared_libraries_to_exclude = apk_under_test_info.shared_libraries,
    )

    # Uses the APK-under-test's keystore for signing.
    output_apk = build_apk(
        label = ctx.label,
        actions = ctx.actions,
        android_toolchain = ctx.attrs._android_toolchain[AndroidToolchainInfo],
        keystore = apk_under_test_info.keystore,
        dex_files_info = dex_files_info,
        native_library_info = native_library_info,
        resources_info = resources_info,
    )

    return [
        AndroidApkInfo(apk = output_apk, manifest = resources_info.manifest),
        AndroidInstrumentationApkInfo(apk_under_test = ctx.attrs.apk[AndroidApkInfo].apk),
        DefaultInfo(default_output = output_apk),
    ]
def _verify_params(ctx: "context"):
    """Fails fast if the target is configured with deprecated tooling."""
    aapt_mode = ctx.attrs.aapt_mode
    dex_tool = ctx.attrs.dex_tool
    expect(aapt_mode == "aapt2", "aapt1 is deprecated!")
    expect(dex_tool == "d8", "dx is deprecated!")

View File

@ -0,0 +1,97 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//android:android_providers.bzl", "AndroidApkInfo", "AndroidInstrumentationApkInfo")
load("@prelude//android:android_toolchain.bzl", "AndroidToolchainInfo")
load("@prelude//java:java_toolchain.bzl", "JavaToolchainInfo")
load("@prelude//java/utils:java_utils.bzl", "get_path_separator")
load("@prelude//utils:utils.bzl", "expect")
load("@prelude//test/inject_test_run_info.bzl", "inject_test_run_info")
def android_instrumentation_test_impl(ctx: "context"):
    # Implementation of `android_instrumentation_test`: assembles the command
    # line for the instrumentation test runner and wraps it in
    # ExternalRunnerTestInfo for the external test executor.
    android_toolchain = ctx.attrs._android_toolchain[AndroidToolchainInfo]

    cmd = [ctx.attrs._java_toolchain[JavaToolchainInfo].java_for_tests]

    # The runner's classpath is passed via an @-argfile to keep the command short.
    classpath = android_toolchain.instrumentation_test_runner_classpath

    classpath_args = cmd_args()
    classpath_args.add("-classpath")
    classpath_args.add(cmd_args(classpath, delimiter = get_path_separator()))
    classpath_args_file = ctx.actions.write("classpath_args_file", classpath_args)
    cmd.append(cmd_args(classpath_args_file, format = "@{}").hidden(classpath_args))

    cmd.append(android_toolchain.instrumentation_test_runner_main_class)

    apk_info = ctx.attrs.apk.get(AndroidApkInfo)
    expect(apk_info != None, "Provided APK must have AndroidApkInfo!")

    # When testing against an android_instrumentation_apk, also pass the APK it
    # instruments.
    instrumentation_apk_info = ctx.attrs.apk.get(AndroidInstrumentationApkInfo)
    if instrumentation_apk_info != None:
        cmd.extend(["--apk-under-test-path", instrumentation_apk_info.apk_under_test])

    # Extract the package names and test-runner class from the APK's manifest;
    # the runner reads these back via @-files at test time.
    target_package_file = ctx.actions.declare_output("target_package_file")
    package_file = ctx.actions.declare_output("package_file")
    test_runner_file = ctx.actions.declare_output("test_runner_file")
    manifest_utils_cmd = cmd_args(ctx.attrs._android_toolchain[AndroidToolchainInfo].manifest_utils[RunInfo])
    manifest_utils_cmd.add([
        "--manifest-path",
        apk_info.manifest,
        "--package-output",
        package_file.as_output(),
        "--target-package-output",
        target_package_file.as_output(),
        "--instrumentation-test-runner-output",
        test_runner_file.as_output(),
    ])
    ctx.actions.run(manifest_utils_cmd, category = "get_manifest_info")
    cmd.extend(
        [
            "--test-package-name",
            cmd_args(package_file, format = "@{}"),
            "--target-package-name",
            cmd_args(target_package_file, format = "@{}"),
            "--test-runner",
            cmd_args(test_runner_file, format = "@{}"),
        ],
    )

    # The adb path is resolved by the executor; the flag is required by the
    # runner's CLI but its value is not used here.
    cmd.extend(
        [
            "--adb-executable-path",
            "required_but_unused",
            "--instrumentation-apk-path",
            apk_info.apk,
        ],
    )

    test_info = ExternalRunnerTestInfo(
        type = "android_instrumentation",
        command = cmd,
        env = ctx.attrs.env,
        # TODO(T122022107) support static listing
        labels = ctx.attrs.labels + ["tpx::dynamic_listing_instrumentation_test"],
        contacts = ctx.attrs.contacts,
        run_from_project_root = True,
        use_project_relative_paths = True,
        executor_overrides = {
            # Tests run on a remote Android emulator; local runs are gated by
            # the toolchain flag.
            "android-emulator": CommandExecutorConfig(
                local_enabled = android_toolchain.instrumentation_test_can_run_locally,
                remote_enabled = True,
                remote_execution_properties = {
                    "platform": "android-emulator",
                    "subplatform": "android-30",
                },
                remote_execution_use_case = "instrumentation-tests",
            ),
            "static-listing": CommandExecutorConfig(local_enabled = True, remote_enabled = False),
        },
    )
    return inject_test_run_info(ctx, test_info) + [
        DefaultInfo(),
    ]

View File

@ -0,0 +1,96 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load(
"@prelude//android:android_providers.bzl",
"AndroidLibraryIntellijInfo",
"AndroidResourceInfo",
"merge_android_packageable_info",
"merge_exported_android_resource_info",
)
load("@prelude//android:android_toolchain.bzl", "AndroidToolchainInfo")
load("@prelude//android:r_dot_java.bzl", "get_dummy_r_dot_java")
load("@prelude//java:java_library.bzl", "build_java_library")
load("@prelude//java:java_providers.bzl", "create_native_providers", "to_list")
load("@prelude//kotlin:kotlin_library.bzl", "build_kotlin_library")
def android_library_impl(ctx: "context") -> ["provider"]:
    """Implementation of `android_library`: builds the library and merges Android providers."""
    deps_query = ctx.attrs.deps_query or []
    packaging_deps = ctx.attrs.deps + deps_query + ctx.attrs.exported_deps + ctx.attrs.runtime_deps

    if ctx.attrs._build_only_native_code:
        # Native-only configuration: skip Java/Kotlin compilation entirely.
        shared_library_info, cxx_resource_info = create_native_providers(ctx.actions, ctx.label, packaging_deps)
        # Add an unused default output in case this target is used as an attr.source() anywhere.
        unused_jar = ctx.actions.write("{}/unused.jar".format(ctx.label.name), [])
        return [
            shared_library_info,
            cxx_resource_info,
            DefaultInfo(default_output = unused_jar),
        ]

    java_providers, android_library_intellij_info = build_android_library(ctx)
    extra_providers = [android_library_intellij_info] if android_library_intellij_info != None else []

    return to_list(java_providers) + [
        merge_android_packageable_info(
            ctx.label,
            ctx.actions,
            packaging_deps,
            manifest = ctx.attrs.manifest,
        ),
        merge_exported_android_resource_info(ctx.attrs.exported_deps),
    ] + extra_providers
def build_android_library(
        ctx: "context",
        r_dot_java: ["artifact", None] = None) -> ("JavaProviders", [AndroidLibraryIntellijInfo.type, None]):
    """Compiles the library (Java or Kotlin) with the Android bootclasspath and R.java on the classpath."""
    bootclasspath_entries = list(ctx.attrs._android_toolchain[AndroidToolchainInfo].android_bootclasspath)
    dummy_r_dot_java, android_library_intellij_info = _get_dummy_r_dot_java(ctx)

    additional_classpath_entries = []
    extra_sub_targets = {}
    if r_dot_java:
        # A real R.java was supplied by the caller; prefer it.
        additional_classpath_entries.append(r_dot_java)
    elif dummy_r_dot_java:
        additional_classpath_entries.append(dummy_r_dot_java)
        extra_sub_targets["dummy_r_dot_java"] = [DefaultInfo(default_output = dummy_r_dot_java)]

    language = ctx.attrs.language
    common_kwargs = {
        "additional_classpath_entries": additional_classpath_entries,
        "bootclasspath_entries": bootclasspath_entries,
        "extra_sub_targets": extra_sub_targets,
    }
    if language != None and language.lower() == "kotlin":
        java_providers = build_kotlin_library(ctx, **common_kwargs)
    else:
        java_providers = build_java_library(ctx, ctx.attrs.srcs, **common_kwargs)
    return java_providers, android_library_intellij_info
def _get_dummy_r_dot_java(
        ctx: "context") -> (["artifact", None], [AndroidLibraryIntellijInfo.type, None]):
    """Builds a dummy R.java from the resource deps, or returns (None, None) when there are none."""
    resource_deps = ctx.attrs.deps + (ctx.attrs.deps_query or []) + ctx.attrs.provided_deps + (getattr(ctx.attrs, "provided_deps_query", []) or [])

    # Keep only deps that actually carry resources.
    resources_with_res = []
    for dep in resource_deps:
        resource_info = dep.get(AndroidResourceInfo)
        if resource_info != None and resource_info.res != None:
            resources_with_res.append(resource_info)
    android_resources = dedupe(resources_with_res)

    if not android_resources:
        return (None, None)

    dummy_r_dot_java_library_info = get_dummy_r_dot_java(
        ctx,
        ctx.attrs._android_toolchain[AndroidToolchainInfo].merge_android_resources[RunInfo],
        android_resources,
        ctx.attrs.resource_union_package,
    )
    dummy_r_dot_java = dummy_r_dot_java_library_info.library_output.abi
    intellij_info = AndroidLibraryIntellijInfo(
        dummy_r_dot_java = dummy_r_dot_java,
        android_resource_deps = android_resources,
    )
    return (dummy_r_dot_java, intellij_info)

View File

@ -0,0 +1,77 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//android:android_providers.bzl", "AndroidManifestInfo", "merge_android_packageable_info")
load("@prelude//android:android_toolchain.bzl", "AndroidToolchainInfo")
load("@prelude//android:voltron.bzl", "ROOT_MODULE")
def android_manifest_impl(ctx: "context") -> ["provider"]:
    """Implementation of `android_manifest`: merges dep manifests into the skeleton manifest."""
    generate_manifest_tool = ctx.attrs._android_toolchain[AndroidToolchainInfo].generate_manifest[RunInfo]
    manifest, merge_report = generate_android_manifest(
        ctx,
        generate_manifest_tool,
        ctx.attrs.skeleton,
        ROOT_MODULE,
        _get_manifests_from_deps(ctx),
        {},
    )
    return [
        AndroidManifestInfo(manifest = manifest, merge_report = merge_report),
        DefaultInfo(default_output = manifest, other_outputs = [merge_report]),
    ]
def generate_android_manifest(
        ctx: "context",
        generate_manifest: RunInfo.type,
        manifest_skeleton: "artifact",
        module_name: str.type,
        manifests: ["ManifestTSet", ["artifact"], None],
        placeholder_entries: "dict") -> ("artifact", "artifact"):
    # Merges `manifests` into `manifest_skeleton` for the given module,
    # returning (merged AndroidManifest.xml, textual merge report).
    generate_manifest_cmd = cmd_args(generate_manifest)
    generate_manifest_cmd.add([
        "--skeleton-manifest",
        manifest_skeleton,
        "--module-name",
        module_name,
    ])

    # `manifests` may be a ManifestTSet, a plain artifact list, or None;
    # normalize it to something writable to a paths file.
    if not manifests:
        manifests = []
    elif type(manifests) == "transitive_set":
        manifests = manifests.project_as_args("artifacts", ordering = "bfs")

    library_manifest_paths_file = ctx.actions.write("{}/library_manifest_paths_file".format(module_name), manifests)

    generate_manifest_cmd.add(["--library-manifests-list", library_manifest_paths_file])
    # The paths file only lists the manifests; declare them as inputs too.
    generate_manifest_cmd.hidden(manifests)

    # Placeholder entries are passed as "<key> <value>" lines.
    placeholder_entries_args = cmd_args()
    for key, val in placeholder_entries.items():
        placeholder_entries_args.add(cmd_args(key, val, delimiter = " "))
    placeholder_entries_file = ctx.actions.write("{}/placeholder_entries_file".format(module_name), placeholder_entries_args)

    generate_manifest_cmd.add(["--placeholder-entries-list", placeholder_entries_file])

    output = ctx.actions.declare_output("{}/AndroidManifest.xml".format(module_name))
    merge_report = ctx.actions.declare_output("{}/merge-report.txt".format(module_name))
    generate_manifest_cmd.add([
        "--output",
        output.as_output(),
        "--merge-report",
        merge_report.as_output(),
    ])

    ctx.actions.run(generate_manifest_cmd, category = "generate_manifest", identifier = module_name)

    return (output, merge_report)
def _get_manifests_from_deps(ctx: "context") -> ["ManifestTSet", None]:
    """Returns the transitive manifest set from deps, or None when there are no deps."""
    deps = ctx.attrs.deps
    if not deps:
        return None
    return merge_android_packageable_info(ctx.label, ctx.actions, deps).manifests

View File

@ -0,0 +1,115 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//android:android_providers.bzl", "AndroidResourceInfo", "PrebuiltNativeLibraryDir", "merge_android_packageable_info")
load("@prelude//android:android_resource.bzl", "aapt2_compile", "extract_package_from_manifest")
load("@prelude//android:android_toolchain.bzl", "AndroidToolchainInfo")
load(
"@prelude//java:java_providers.bzl",
"JavaClasspathEntry",
"create_abi",
"create_java_library_providers",
)
load("@prelude//java:java_toolchain.bzl", "JavaToolchainInfo")
def android_prebuilt_aar_impl(ctx: "context") -> ["provider"]:
    # Implementation of `android_prebuilt_aar`: unpacks the .aar into its
    # constituent pieces and re-exposes them as resource, Java, and native
    # providers.

    # Declare all outputs of the single unpack action up front.
    manifest = ctx.actions.declare_output("AndroidManifest.xml")
    all_classes_jar = ctx.actions.declare_output("classes.jar")
    r_dot_txt = ctx.actions.declare_output("R.txt")
    res = ctx.actions.declare_output("res", dir = True)
    assets = ctx.actions.declare_output("assets", dir = True)
    jni = ctx.actions.declare_output("jni", dir = True)
    annotation_jars_dir = ctx.actions.declare_output("annotation_jars", dir = True)
    proguard_config = ctx.actions.declare_output("proguard.txt")

    android_toolchain = ctx.attrs._android_toolchain[AndroidToolchainInfo]
    unpack_aar_tool = android_toolchain.unpack_aar[RunInfo]
    java_toolchain = ctx.attrs._java_toolchain[JavaToolchainInfo]
    # The jar tool is forwarded to the unpack tool (see --jar-tool below).
    jar_tool = java_toolchain.jar

    unpack_aar_cmd = [
        unpack_aar_tool,
        "--aar",
        ctx.attrs.aar,
        "--manifest-path",
        manifest.as_output(),
        "--all-classes-jar-path",
        all_classes_jar.as_output(),
        "--r-dot-txt-path",
        r_dot_txt.as_output(),
        "--res-path",
        res.as_output(),
        "--assets-path",
        assets.as_output(),
        "--jni-path",
        jni.as_output(),
        "--annotation-jars-dir",
        annotation_jars_dir.as_output(),
        "--proguard-config-path",
        proguard_config.as_output(),
        "--jar-tool",
        jar_tool,
    ]

    ctx.actions.run(unpack_aar_cmd, category = "android_unpack_aar")

    resource_info = AndroidResourceInfo(
        raw_target = ctx.label.raw_target(),
        aapt2_compile_output = aapt2_compile(ctx, res, android_toolchain),
        allow_strings_as_assets_resource_filtering = True,
        assets = assets,
        manifest_file = manifest,
        r_dot_java_package = extract_package_from_manifest(ctx, manifest),
        res = res,
        text_symbols = r_dot_txt,
    )

    # Bootstrap toolchains cannot generate class ABIs; fall back to the full jar.
    abi = None if java_toolchain.is_bootstrap_toolchain else create_abi(ctx.actions, java_toolchain.class_abi_generator, all_classes_jar)

    library_output_classpath_entry = JavaClasspathEntry(
        full_library = all_classes_jar,
        abi = abi or all_classes_jar,
        abi_as_dir = None,
        required_for_source_only_abi = ctx.attrs.required_for_source_only_abi,
    )

    java_library_info, java_packaging_info, shared_library_info, cxx_resource_info, template_placeholder_info, java_library_intellij_info = create_java_library_providers(
        ctx = ctx,
        library_output = library_output_classpath_entry,
        exported_deps = ctx.attrs.deps,
        needs_desugar = True,
        is_prebuilt_jar = True,
        annotation_jars_dir = annotation_jars_dir,
        proguard_config = proguard_config,
    )

    native_library = PrebuiltNativeLibraryDir(
        raw_target = ctx.label.raw_target(),
        dir = jni,
        for_primary_apk = ctx.attrs.use_system_library_loader,
        is_asset = False,
    )

    return [
        java_library_info,
        java_packaging_info,
        shared_library_info,
        cxx_resource_info,
        template_placeholder_info,
        java_library_intellij_info,
        merge_android_packageable_info(ctx.label, ctx.actions, ctx.attrs.deps, manifest = manifest, prebuilt_native_library_dir = native_library, resource_info = resource_info),
        resource_info,
        DefaultInfo(default_output = all_classes_jar, other_outputs = [
            manifest,
            r_dot_txt,
            res,
            assets,
            jni,
            annotation_jars_dir,
            proguard_config,
        ]),
    ]

View File

@ -0,0 +1,291 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
# Outputs of linking Android resources with aapt2.
Aapt2LinkInfo = record(
    # "APK" containing resources to be used by the Android binary
    primary_resources_apk = "artifact",
    # proguard config needed to retain used resources
    proguard_config_file = "artifact",
    # R.txt containing all the linked resources
    r_dot_txt = "artifact",
)

# Native-library outputs computed for an Android binary.
AndroidBinaryNativeLibsInfo = record(
    # native libs carried by the APK-under-test (excluded from this APK)
    apk_under_test_prebuilt_native_library_dirs = ["PrebuiltNativeLibraryDir"],
    apk_under_test_shared_libraries = ["SharedLibrary"],
    exopackage_info = ["ExopackageNativeInfo", None],
    native_lib_assets = ["artifact"],
    native_libs_for_primary_apk = ["artifact"],
    unstripped_libs = ["artifact"],
)

# Resource outputs computed for an Android binary.
AndroidBinaryResourcesInfo = record(
    # Optional information about resources that should be exopackaged
    exopackage_info = ["ExopackageResourcesInfo", None],
    # manifest to be used by the APK
    manifest = "artifact",
    # per-module manifests (packaged as assets)
    module_manifests = ["artifact"],
    # zip containing any strings packaged as assets
    packaged_string_assets = ["artifact", None],
    # "APK" containing resources to be used by the Android binary
    primary_resources_apk = "artifact",
    # proguard config needed to retain used resources
    proguard_config_file = "artifact",
    # R.java jars containing all the linked resources
    r_dot_javas = ["JavaLibraryInfo"],
    # directory containing filtered string resources files
    string_source_map = ["artifact", None],
    # directory containing filtered string resources files for Voltron language packs
    voltron_string_source_map = ["artifact", None],
    # list of jars that could contain resources that should be packaged into the APK
    jar_files_that_may_contain_resources = ["artifact"],
    # The resource infos that are used in this APK
    unfiltered_resource_infos = ["AndroidResourceInfo"],
)

# Information about an `android_build_config`
BuildConfigField = record(
    type = str.type,
    name = str.type,
    value = str.type,
)

AndroidBuildConfigInfo = provider(
    fields = [
        "package",  # str.type
        "build_config_fields",  # ["BuildConfigField"]
    ],
)

# Information about an `android_manifest`
AndroidManifestInfo = provider(
    fields = [
        "manifest",  # artifact
        "merge_report",  # artifact
    ],
)

# An assembled APK and the manifest it was built with.
AndroidApkInfo = provider(
    fields = [
        "apk",
        "manifest",
    ],
)

# Everything an instrumentation APK needs to know about the APK it tests,
# chiefly so it can avoid re-packaging content already present there.
AndroidApkUnderTestInfo = provider(
    fields = [
        "java_packaging_deps",  # set_type("JavaPackagingDep")
        "keystore",  # "KeystoreInfo"
        "manifest_entries",  # dict.type
        "prebuilt_native_library_dirs",  # set_type("PrebuiltNativeLibraryDir")
        "platforms",  # [str.type]
        "primary_platform",  # str.type
        "resource_infos",  # set_type("ResourceInfos")
        "shared_libraries",  # set_type("SharedLibrary")
    ],
)

AndroidInstrumentationApkInfo = provider(
    fields = [
        "apk_under_test",  # "artifact"
    ],
)

# A directory of prebuilt native libraries contributed by a target.
PrebuiltNativeLibraryDir = record(
    raw_target = "target_label",
    dir = "artifact",  # contains subdirectories for different ABIs.
    for_primary_apk = bool.type,
    is_asset = bool.type,
)
def _artifacts(value: "ManifestInfo"):
    # args projection for ManifestTSet: renders each node as its manifest artifact.
    return value.manifest
# Transitive sets used to accumulate Android data across the dep graph.
AndroidBuildConfigInfoTSet = transitive_set()

AndroidDepsTSet = transitive_set()

ManifestTSet = transitive_set(args_projections = {"artifacts": _artifacts})

PrebuiltNativeLibraryDirTSet = transitive_set()

ResourceInfoTSet = transitive_set()

# Node payload for AndroidDepsTSet: a target and its direct deps.
DepsInfo = record(
    name = "target_label",
    deps = ["target_label"],
)

# Node payload for ManifestTSet: a manifest and the target that owns it.
ManifestInfo = record(
    target_label = "target_label",
    manifest = "artifact",
)

# Aggregated, transitive Android data for a target; produced by
# merge_android_packageable_info. Fields are None when nothing contributes.
AndroidPackageableInfo = provider(
    fields = [
        "target_label",  # "target_label"
        "build_config_infos",  # ["AndroidBuildConfigInfoTSet", None]
        "deps",  # ["AndroidDepsTSet", None]
        "manifests",  # ["ManifestTSet", None]
        "prebuilt_native_library_dirs",  # ["PrebuiltNativeLibraryDirTSet", None]
        "resource_infos",  # ["AndroidResourceInfoTSet", None]
    ],
)

# Information about an `android_resource`
AndroidResourceInfo = provider(
    fields = [
        # Target that produced this provider
        "raw_target",  # "target_label",
        # output of running `aapt2_compile` on the resources, if resources are present
        "aapt2_compile_output",  # ["artifact", None]
        # if False, then the "res" are not affected by the strings-as-assets resource filter
        "allow_strings_as_assets_resource_filtering",  # bool.type
        # assets defined by this rule. May be empty
        "assets",  # ["artifact", None]
        # manifest file used by the resources, if resources are present
        "manifest_file",  # ["artifact", None]
        # package used for R.java, if resources are present
        "r_dot_java_package",  # ["artifact", None]
        # resources defined by this rule. May be empty
        "res",  # ["artifact", None]
        # symbols defined by the resources, if resources are present
        "text_symbols",  # ["artifact", None]
    ],
)

# `AndroidResourceInfos` that are exposed via `exported_deps`
ExportedAndroidResourceInfo = provider(
    fields = [
        "resource_infos",  # ["AndroidResourceInfo"]
    ],
)

# Metadata + directory pair for exopackaged secondary dexes.
ExopackageDexInfo = record(
    metadata = "artifact",
    directory = "artifact",
)

# Metadata + directory pair for exopackaged native libraries.
ExopackageNativeInfo = record(
    metadata = "artifact",
    directory = "artifact",
)

# Resources (and their content hashes) that are installed separately from the APK.
ExopackageResourcesInfo = record(
    assets = ["artifact", None],
    assets_hash = ["artifact", None],
    res = "artifact",
    res_hash = "artifact",
    third_party_jar_resources = "artifact",
    third_party_jar_resources_hash = "artifact",
)

# The dex outputs of an Android binary build.
DexFilesInfo = record(
    primary_dex = "artifact",
    primary_dex_class_names = ["artifact", None],
    secondary_dex_dirs = ["artifact"],
    secondary_dex_exopackage_info = [ExopackageDexInfo.type, None],
    proguard_text_files_path = ["artifact", None],
)

# All exopackage pieces of an Android binary; each is None when not exopackaged.
ExopackageInfo = record(
    secondary_dex_info = [ExopackageDexInfo.type, None],
    native_library_info = [ExopackageNativeInfo.type, None],
    resources_info = [ExopackageResourcesInfo.type, None],
)

AndroidLibraryIntellijInfo = provider(
    doc = "Information about android library that is required for Intellij project generation",
    fields = [
        "dummy_r_dot_java",  # ["artifact", None]
        "android_resource_deps",  # ["AndroidResourceInfo"]
    ],
)
def merge_android_packageable_info(
        label: "label",
        actions: "actions",
        deps: ["dependency"],
        build_config_info: ["AndroidBuildConfigInfo", None] = None,
        manifest: ["artifact", None] = None,
        prebuilt_native_library_dir: [PrebuiltNativeLibraryDir.type, None] = None,
        resource_info: ["AndroidResourceInfo", None] = None) -> "AndroidPackageableInfo":
    # Merges this node's own Android data with that of its deps into a single
    # AndroidPackageableInfo. Each field becomes a transitive set whose
    # children come from the deps and whose value (if any) is this node's own
    # contribution; a field with neither is None.
    android_packageable_deps = filter(None, [x.get(AndroidPackageableInfo) for x in deps])

    build_config_infos = _get_transitive_set(
        actions,
        filter(None, [dep.build_config_infos for dep in android_packageable_deps]),
        build_config_info,
        AndroidBuildConfigInfoTSet,
    )

    # The deps tset always gets a node for this target, recording its direct deps.
    deps = _get_transitive_set(
        actions,
        filter(None, [dep.deps for dep in android_packageable_deps]),
        DepsInfo(
            name = label.raw_target(),
            deps = [dep.target_label for dep in android_packageable_deps],
        ),
        AndroidDepsTSet,
    )

    manifests = _get_transitive_set(
        actions,
        filter(None, [dep.manifests for dep in android_packageable_deps]),
        ManifestInfo(
            target_label = label.raw_target(),
            manifest = manifest,
        ) if manifest else None,
        ManifestTSet,
    )

    prebuilt_native_library_dirs = _get_transitive_set(
        actions,
        filter(None, [dep.prebuilt_native_library_dirs for dep in android_packageable_deps]),
        prebuilt_native_library_dir,
        PrebuiltNativeLibraryDirTSet,
    )

    resource_infos = _get_transitive_set(
        actions,
        filter(None, [dep.resource_infos for dep in android_packageable_deps]),
        resource_info,
        ResourceInfoTSet,
    )

    return AndroidPackageableInfo(
        target_label = label.raw_target(),
        build_config_infos = build_config_infos,
        deps = deps,
        manifests = manifests,
        prebuilt_native_library_dirs = prebuilt_native_library_dirs,
        resource_infos = resource_infos,
    )
def _get_transitive_set(
        actions: "actions",
        children: ["transitive_set"],
        node: "_a",
        transitive_set_definition: "transitive_set_definition") -> ["transitive_set", None]:
    """Builds a tset from `children` and/or `node`, or returns None when both are empty."""
    if not children and not node:
        return None
    kwargs = {}
    if children:
        kwargs["children"] = children
    if node:
        kwargs["value"] = node
    return actions.tset(transitive_set_definition, **kwargs)
def merge_exported_android_resource_info(
        exported_deps: ["dependency"]) -> "ExportedAndroidResourceInfo":
    """Flattens resource infos exposed by exported deps, both re-exported and direct."""
    collected = []
    for dep in exported_deps:
        re_exported = dep.get(ExportedAndroidResourceInfo)
        if re_exported != None:
            collected.extend(re_exported.resource_infos)
        direct = dep.get(AndroidResourceInfo)
        if direct != None:
            collected.append(direct)
    return ExportedAndroidResourceInfo(resource_infos = dedupe(collected))

View File

@ -0,0 +1,145 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//java:java_providers.bzl", "get_java_packaging_info")
load("@prelude//utils:utils.bzl", "expect")
load(":android_providers.bzl", "AndroidResourceInfo", "ExportedAndroidResourceInfo", "merge_android_packageable_info")
load(":android_toolchain.bzl", "AndroidToolchainInfo")
# Filename of the artifact that holds a resource target's R.java package name.
JAVA_PACKAGE_FILENAME = "java_package.txt"
def _convert_to_artifact_dir(ctx: "context", attr: ["dependency", "dict", "artifact", None], attr_name: str.type) -> ["artifact", None]:
    """Normalizes a res/assets attr (dep, path->file dict, or artifact) to a single directory artifact."""
    attr_type = type(attr)
    if attr_type == "dependency":
        outputs = attr[DefaultInfo].default_outputs
        expect(len(outputs) == 1, "Expect one default output from build dep of attr {}!".format(attr_name))
        return outputs[0]
    if attr_type == "dict":
        if not attr:
            return None
        return ctx.actions.symlinked_dir("{}_dir".format(attr_name), attr)
    return attr
def android_resource_impl(ctx: "context") -> ["provider"]:
    """Implementation of `android_resource`: compiles res/assets and exposes resource providers."""
    if ctx.attrs._build_only_native_code:
        return [DefaultInfo()]

    # TODO(T100007184) filter res/assets by ignored filenames
    sub_targets = {}
    default_output = None
    res = _convert_to_artifact_dir(ctx, ctx.attrs.res, "res")
    assets = _convert_to_artifact_dir(ctx, ctx.attrs.assets, "assets")

    # These outputs only exist when a res directory is present.
    aapt2_compile_output = None
    r_dot_java_package = None
    text_symbols = None
    if res:
        aapt2_compile_output = aapt2_compile(ctx, res, ctx.attrs._android_toolchain[AndroidToolchainInfo])
        sub_targets["aapt2_compile"] = [DefaultInfo(default_output = aapt2_compile_output)]
        text_symbols = get_text_symbols(ctx, res, ctx.attrs.deps)
        default_output = text_symbols
        r_dot_java_package = _get_package(ctx, ctx.attrs.package, ctx.attrs.manifest)

    resource_info = AndroidResourceInfo(
        raw_target = ctx.label.raw_target(),
        aapt2_compile_output = aapt2_compile_output,
        allow_strings_as_assets_resource_filtering = not ctx.attrs.has_whitelisted_strings,
        assets = assets,
        manifest_file = ctx.attrs.manifest,
        r_dot_java_package = r_dot_java_package,
        res = res,
        text_symbols = text_symbols,
    )

    return [
        resource_info,
        merge_android_packageable_info(ctx.label, ctx.actions, ctx.attrs.deps, manifest = ctx.attrs.manifest, resource_info = resource_info),
        get_java_packaging_info(ctx, ctx.attrs.deps),
        DefaultInfo(default_output = default_output, sub_targets = sub_targets),
    ]
def aapt2_compile(
        ctx: "context",
        resources_dir: "artifact",
        android_toolchain: "AndroidToolchainInfo",
        skip_crunch_pngs: bool.type = False,
        identifier: [str.type, None] = None) -> "artifact":
    """Runs `aapt2 compile` over a resource directory, producing a .flata archive."""
    output_name = "{}_resources.flata".format(identifier) if identifier else "resources.flata"
    aapt2_output = ctx.actions.declare_output(output_name)

    aapt2_command = cmd_args(android_toolchain.aapt2)
    aapt2_command.add("compile")
    # --legacy: treat resource errors permitted by the original aapt as warnings.
    aapt2_command.add("--legacy")
    if skip_crunch_pngs:
        aapt2_command.add("--no-crunch")
    aapt2_command.add(["--dir", resources_dir])
    aapt2_command.add("-o", aapt2_output.as_output())

    ctx.actions.run(aapt2_command, category = "aapt2_compile", identifier = identifier)
    return aapt2_output
def _get_package(ctx: "context", package: [str.type, None], manifest: ["artifact", None]) -> "artifact":
    """Materializes the R.java package name from the attr, or extracts it from the manifest."""
    if not package:
        expect(manifest != None, "if package is not declared then a manifest must be")
        return extract_package_from_manifest(ctx, manifest)
    return ctx.actions.write(JAVA_PACKAGE_FILENAME, package)
def extract_package_from_manifest(ctx: "context", manifest: "artifact") -> "artifact":
    """Uses the toolchain's manifest_utils to pull the package name out of a manifest."""
    package_output = ctx.actions.declare_output(JAVA_PACKAGE_FILENAME)
    cmd = cmd_args(ctx.attrs._android_toolchain[AndroidToolchainInfo].manifest_utils[RunInfo])
    cmd.add(["--manifest-path", manifest, "--package-output", package_output.as_output()])
    ctx.actions.run(cmd, category = "android_extract_package")
    return package_output
def get_text_symbols(
        ctx: "context",
        res: "artifact",
        deps: ["dependency"],
        identifier: [str.type, None] = None):
    # Runs mini_aapt over `res` to produce an R.txt symbols file, feeding it
    # the dep symbol files so references can be resolved.
    mini_aapt_cmd = cmd_args(ctx.attrs._android_toolchain[AndroidToolchainInfo].mini_aapt[RunInfo])

    mini_aapt_cmd.add(["--resource-paths", res])

    dep_symbol_paths = cmd_args()
    dep_symbols = _get_dep_symbols(deps)
    dep_symbol_paths.add(dep_symbols)

    # allow_args is needed since cmd_args content is written to a file; the
    # file only lists paths, so the symbol artifacts are added as hidden inputs.
    dep_symbol_paths_file, _ = ctx.actions.write("{}_dep_symbol_paths_file".format(identifier) if identifier else "dep_symbol_paths_file", dep_symbol_paths, allow_args = True)

    mini_aapt_cmd.add(["--dep-symbol-paths", dep_symbol_paths_file])
    mini_aapt_cmd.hidden(dep_symbols)

    text_symbols = ctx.actions.declare_output("{}_R.txt".format(identifier) if identifier else "R.txt")
    mini_aapt_cmd.add(["--output-path", text_symbols.as_output()])

    ctx.actions.run(mini_aapt_cmd, category = "mini_aapt", identifier = identifier)

    return text_symbols
def _get_dep_symbols(deps: ["dependency"]) -> ["artifact"]:
    # Collect the R.txt symbol artifacts from every dependency's direct and
    # exported resource infos, deduplicated.
    symbols = []
    for dep in deps:
        resource_info = dep.get(AndroidResourceInfo)
        exported_info = dep.get(ExportedAndroidResourceInfo)
        expect(
            resource_info != None or exported_info != None,
            "Dependencies of `android_resource` rules should be `android_resource`s or `android_library`s",
        )
        if resource_info and resource_info.text_symbols:
            symbols.append(resource_info.text_symbols)
        if exported_info:
            for info in exported_info.resource_infos:
                if info.text_symbols:
                    symbols.append(info.text_symbols)
    return dedupe(symbols)

View File

@ -0,0 +1,55 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
# Provider carrying the name of the Android platform a target was built for.
AndroidPlatformInfo = provider(fields = [
    "name",  # the platform name
])
# Provider describing the tools and configuration values that make up an
# Android toolchain. Fields are looked up by the rules in this prelude
# (e.g. `d8_command`, `mini_aapt`, `manifest_utils` are run via RunInfo);
# exact field types are determined by the toolchain definition — not visible
# here, so they are intentionally not documented per-field.
AndroidToolchainInfo = provider(fields = [
    "aapt2",
    "adb",
    "aidl",
    "android_jar",
    "android_bootclasspath",
    "apk_builder",
    "apk_module_graph",
    "combine_native_library_dirs",
    "compress_libraries",
    "d8_command",
    "exo_resources_rewriter",
    "exopackage_agent_apk",
    "filter_dex_class_names",
    "filter_prebuilt_native_library_dir",
    "installer",
    "multi_dex_command",
    "copy_string_resources",
    "filter_resources",
    "framework_aidl_file",
    "generate_build_config",
    "generate_manifest",
    "instrumentation_test_can_run_locally",
    "instrumentation_test_runner_classpath",
    "instrumentation_test_runner_main_class",
    "manifest_utils",
    "merge_android_resources",
    "merge_assets",
    "merge_third_party_jar_resources",
    "mini_aapt",
    "native_libs_as_assets_metadata",
    "optimized_proguard_config",
    "package_strings_as_assets",
    "proguard_config",
    "proguard_jar",
    "proguard_max_heap_size",
    "r_dot_java_weight_factor",
    "replace_application_id_placeholders",
    "secondary_dex_weight_limit",
    "set_application_id_to_specified_package",
    "should_run_sanity_check_for_placeholders",
    "unpack_aar",
    "zipalign",
])

View File

@ -0,0 +1,44 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//:genrule.bzl", "process_genrule")
load("@prelude//android:android_apk.bzl", "get_install_info")
load("@prelude//android:android_providers.bzl", "AndroidApkInfo", "AndroidApkUnderTestInfo")
load("@prelude//utils:utils.bzl", "expect")
def apk_genrule_impl(ctx: "context") -> ["provider"]:
    # Wrap an existing APK in a genrule: the input APK is exposed to the
    # genrule command via the $APK env var, and the genrule's single output
    # is re-exported as the new AndroidApkInfo.
    # TODO(T104150125) The underlying APK should not have exopackage enabled
    apk_info = ctx.attrs.apk[AndroidApkInfo]
    expect(apk_info != None, "'apk' attribute must be an Android APK!")
    apk_under_test_info = ctx.attrs.apk[AndroidApkUnderTestInfo]

    # Like buck1, we ignore the 'out' attribute and construct the output path ourselves.
    output_apk_name = "{}.apk".format(ctx.label.name)
    genrule_providers = process_genrule(ctx, output_apk_name, None, {
        "APK": cmd_args(apk_info.apk),
    })
    expect(
        len(genrule_providers) == 1 and type(genrule_providers[0]) == DefaultInfo.type,
        "Expecting just a single DefaultInfo, but got {}".format(genrule_providers),
    )
    output_apk = genrule_providers[0].default_outputs[0]

    providers = genrule_providers + [
        AndroidApkInfo(
            apk = output_apk,
            manifest = apk_info.manifest,
        ),
        get_install_info(
            ctx,
            output_apk = output_apk,
            manifest = apk_info.manifest,
            exopackage_info = None,
        ),
    ]
    return providers + filter(None, [apk_under_test_info])

View File

@ -0,0 +1,221 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//android:cpu_filters.bzl", "ALL_CPU_FILTERS", "CPU_FILTER_FOR_DEFAULT_PLATFORM")
load("@prelude//android:min_sdk_version.bzl", "get_min_sdk_version_constraint_value_name", "get_min_sdk_version_range")
# Constraint-setting / constraint-value / platform references shared by the
# CPU split transitions defined below.
_REFS = {
    "arm64": "config//cpu/constraints:arm64",
    "armv7": "config//cpu/constraints:arm32",
    "build_only_native_code": "fbsource//xplat/buck2/platform/android:build_only_native_code",
    "building_android_binary": "prelude//os:building_android_binary",
    "cpu": "config//cpu/constraints:cpu",
    "default_platform": "config//platform/android:x86_32-fbsource",
    "do_not_build_only_native_code": "fbsource//xplat/buck2/platform/android:do_not_build_only_native_code",
    "maybe_build_only_native_code": "fbsource//xplat/buck2/platform/android:maybe_build_only_native_code",
    "maybe_building_android_binary": "prelude//os:maybe_building_android_binary",
    "min_sdk_version": "fbsource//xplat/buck2/platform/android:min_sdk_version",
    "x86": "config//cpu/constraints:x86_32",
    "x86_64": "config//cpu/constraints:x86_64",
}

# Register a ref for every supported min_sdk_version constraint value so the
# transitions can resolve them by name (via getattr on refs).
for min_sdk in get_min_sdk_version_range():
    constraint_value_name = get_min_sdk_version_constraint_value_name(min_sdk)
    _REFS[constraint_value_name] = "fbsource//xplat/buck2/platform/android:{}".format(constraint_value_name)
def _cpu_split_transition_instrumentation_test_apk_impl(
        platform: PlatformInfo.type,
        refs: struct.type,
        attrs: struct.type) -> {str.type: PlatformInfo.type}:
    # Split transition for instrumentation test APKs. Unlike the regular
    # split, secondary platforms are never marked build-only-native-code.
    filters = attrs.cpu_filters or ALL_CPU_FILTERS
    if attrs._is_force_single_cpu:
        filters = filters[:1]
    elif attrs._is_force_single_default_cpu:
        filters = ["default"]
    return _cpu_split_transition(
        platform,
        refs,
        filters,
        attrs.min_sdk_version,
        build_only_native_code_on_secondary_platforms = False,
    )
def _cpu_split_transition_impl(
        platform: PlatformInfo.type,
        refs: struct.type,
        attrs: struct.type) -> {str.type: PlatformInfo.type}:
    # Split transition for android_binary deps: one configuration per CPU.
    filters = attrs.cpu_filters or ALL_CPU_FILTERS
    if attrs._is_force_single_cpu:
        filters = filters[:1]
    elif attrs._is_force_single_default_cpu:
        filters = ["default"]

    # If the incoming platform already opted out of native-only builds,
    # don't mark secondary platforms as build-only-native-code.
    existing_labels = [constraint.label for constraint in platform.configuration.constraints.values()]
    opted_out = refs.do_not_build_only_native_code[ConstraintValueInfo].label in existing_labels

    return _cpu_split_transition(
        platform,
        refs,
        filters,
        attrs.min_sdk_version,
        build_only_native_code_on_secondary_platforms = not opted_out,
    )
def _cpu_split_transition(
        platform: PlatformInfo.type,
        refs: struct.type,
        cpu_filters: [str.type],
        min_sdk_version: [int.type, None],
        build_only_native_code_on_secondary_platforms: bool.type) -> {str.type: PlatformInfo.type}:
    # Produce one configuration per requested CPU filter. Every platform after
    # the first may additionally be marked build-only-native-code, and the
    # min_sdk_version constraint is pinned when one was requested.
    cpu_setting = refs.cpu[ConstraintSettingInfo]
    constraint_by_filter = {
        "arm64": refs.arm64[ConstraintValueInfo],
        "armv7": refs.armv7[ConstraintValueInfo],
        "x86": refs.x86[ConstraintValueInfo],
        "x86_64": refs.x86_64[ConstraintValueInfo],
    }

    # A lone "default" filter short-circuits to the canonical default platform.
    if cpu_filters == ["default"]:
        return {CPU_FILTER_FOR_DEFAULT_PLATFORM: refs.default_platform[PlatformInfo]}

    cpu_name_to_cpu_constraint = {}
    for cpu_filter in cpu_filters:
        constraint = constraint_by_filter.get(cpu_filter)
        if constraint == None:
            fail("Unexpected cpu_filter: {}".format(cpu_filter))
        cpu_name_to_cpu_constraint[cpu_filter] = constraint

    # Carry over every incoming constraint except the cpu constraint and the
    # maybe_build_only_native_code constraint, which we set per platform below.
    maybe_native_only_label = refs.maybe_build_only_native_code[ConstraintSettingInfo].label
    base_constraints = {
        setting_label: setting_value
        for (setting_label, setting_value) in platform.configuration.constraints.items()
        if setting_label != cpu_setting.label and setting_label != maybe_native_only_label
    }
    base_constraints[refs.maybe_building_android_binary[ConstraintSettingInfo].label] = refs.building_android_binary[ConstraintValueInfo]
    if min_sdk_version:
        base_constraints[refs.min_sdk_version[ConstraintSettingInfo].label] = _get_min_sdk_constraint_value(min_sdk_version, refs)

    new_configs = {}
    for platform_name, cpu_constraint in cpu_name_to_cpu_constraint.items():
        updated_constraints = dict(base_constraints)
        updated_constraints[cpu_setting.label] = cpu_constraint

        # Only platforms after the first one get the build-only-native-code mark.
        if new_configs and build_only_native_code_on_secondary_platforms:
            updated_constraints[maybe_native_only_label] = refs.build_only_native_code[ConstraintValueInfo]

        new_configs[platform_name] = PlatformInfo(
            label = platform_name,
            configuration = ConfigurationInfo(
                constraints = updated_constraints,
                values = platform.configuration.values,
            ),
        )
    return new_configs
def _cpu_transition_impl(
        platform: PlatformInfo.type,
        refs: struct.type,
        attrs: struct.type) -> PlatformInfo.type:
    # Run the split transition and keep only its first configuration.
    split = _cpu_split_transition_impl(platform, refs, attrs)
    return split.values()[0]
# Split transition used on android_binary deps: fans out into one
# configuration per CPU filter (see _cpu_split_transition_impl).
cpu_split_transition = transition(
    impl = _cpu_split_transition_impl,
    refs = _REFS,
    attrs = [
        "cpu_filters",
        "min_sdk_version",
        "_is_force_single_cpu",
        "_is_force_single_default_cpu",
    ],
    split = True,
)
# Variant of cpu_split_transition for instrumentation test APKs: identical
# except that secondary platforms are never marked build-only-native-code
# (build_only_native_code_on_secondary_platforms = False in the impl).
cpu_split_transition_instrumentation_test_apk = transition(
    impl = _cpu_split_transition_instrumentation_test_apk_impl,
    refs = _REFS,
    attrs = [
        "cpu_filters",
        "min_sdk_version",
        "_is_force_single_cpu",
        "_is_force_single_default_cpu",
    ],
    split = True,
)
# If our deps have been split-transitioned by CPU then we are already analyzing the dependency
# graph using the resulting configurations. If there are any other attributes on the same target
# that also need to analyze the dependency graph, then we want to use one of the configurations
# from the split transition so that we don't end up analyzing the graph again using a different
# configuration. This rule just picks the first configuration from the split-transition.
#
# This is used for the `manifest` attribute of `android_binary`.
# Non-split counterpart of cpu_split_transition: resolves to the first
# configuration the split would have produced.
cpu_transition = transition(
    impl = _cpu_transition_impl,
    refs = _REFS,
    attrs = [
        "cpu_filters",
        "min_sdk_version",
        "_is_force_single_cpu",
        "_is_force_single_default_cpu",
    ],
)
def _do_not_build_only_native_code_transition(
        platform: PlatformInfo.type,
        refs: struct.type) -> PlatformInfo.type:
    # Force the maybe_build_only_native_code setting to the
    # do_not_build_only_native_code value, keeping everything else as-is.
    updated_constraints = dict(platform.configuration.constraints.items())
    setting_label = refs.maybe_build_only_native_code[ConstraintSettingInfo].label
    updated_constraints[setting_label] = refs.do_not_build_only_native_code[ConstraintValueInfo]
    return PlatformInfo(
        label = platform.label,
        configuration = ConfigurationInfo(
            constraints = updated_constraints,
            values = platform.configuration.values,
        ),
    )
# Transition that pins maybe_build_only_native_code to the
# do_not_build_only_native_code value on the incoming platform.
do_not_build_only_native_code_transition = transition(
    impl = _do_not_build_only_native_code_transition,
    refs = {
        "do_not_build_only_native_code": "fbsource//xplat/buck2/platform/android:do_not_build_only_native_code",
        "maybe_build_only_native_code": "fbsource//xplat/buck2/platform/android:maybe_build_only_native_code",
    },
)
def get_deps_by_platform(ctx: "context") -> {str.type: ["dependency"]}:
    """Group split-transitioned deps by the platform name that produced them.

    `ctx.attrs.deps` is a list of dicts mapping platform name -> dependency
    (the result of a split transition); merge them into a single mapping from
    platform name to the list of all deps for that platform.
    """
    deps_by_platform = {}
    for dep_dict in ctx.attrs.deps:
        for platform, dep in dep_dict.items():
            # setdefault keeps the get/append/store dance in one step; this is
            # the same idiom used elsewhere in the prelude (e.g. module_to_jars
            # bucketing in the dex rules).
            deps_by_platform.setdefault(platform, []).append(dep)
    return deps_by_platform
def _get_min_sdk_constraint_value(min_sdk_version: int.type, refs: struct.type) -> ConstraintValueInfo.type:
    # Resolve the pre-registered constraint ref for this min_sdk_version
    # (registered in the _REFS loop above); fail loudly for unknown versions.
    ref_name = get_min_sdk_version_constraint_value_name(min_sdk_version)
    ref = getattr(refs, ref_name, None)
    if ref == None:
        fail("Unsupported min_sdk_version {}, please report!".format(min_sdk_version))
    return ref[ConstraintValueInfo]
def _is_building_android_binary() -> "selector":
    # Selects True only when the building_android_binary constraint is set.
    return select({
        "DEFAULT": False,
        "prelude//os:building_android_binary": True,
    })
def is_building_android_binary_attr() -> "attribute":
    # Hidden bool attribute whose default selects on the
    # building_android_binary constraint; default_only makes it non-settable.
    default = _is_building_android_binary()
    return attrs.default_only(attrs.bool(default = default))

View File

@ -0,0 +1,17 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
# Maps Buck cpu_filter names to the ABI directory names Android expects
# inside an APK (e.g. lib/<abi>/).
CPU_FILTER_TO_ABI_DIRECTORY = {
    "arm64": "arm64-v8a",
    "armv7": "armeabi-v7a",
    "x86": "x86",
    "x86_64": "x86_64",
}

# All supported cpu_filter names, in map order.
ALL_CPU_FILTERS = CPU_FILTER_TO_ABI_DIRECTORY.keys()

# The cpu_filter used when building for the single default platform.
CPU_FILTER_FOR_DEFAULT_PLATFORM = "x86"

View File

@ -0,0 +1,678 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//android:android_providers.bzl", "DexFilesInfo", "ExopackageDexInfo")
load("@prelude//android:voltron.bzl", "ROOT_MODULE", "get_apk_module_graph_info", "get_root_module_only_apk_module_graph_info", "is_root_module")
load("@prelude//java:dex.bzl", "get_dex_produced_from_java_library")
load("@prelude//java:dex_toolchain.bzl", "DexToolchainInfo")
load("@prelude//java:java_library.bzl", "compile_to_jar")
load("@prelude//utils:utils.bzl", "expect", "flatten")
load("@prelude//paths.bzl", "paths")
# Android builds use a tool called `d8` to compile Java bytecode into DEX (Dalvik EXecutable)
# bytecode that runs on Android devices. Our Android builds have two distinct ways of
# doing that:
# 1) With pre-dexing enabled (this is the most common case for debug builds). That means that
# d8 runs on every individual .jar file (to produce a .jar.dex file), and then at the APK
# level we run d8 again to combine all the individual .jar.dex files.
# 2) With pre-dexing disabled (this is the case if it is explicitly disabled, we are running
# proguard, or we preprocess the java classes at the APK level). This means that we run
# d8 at the APK level on all the .jar files.
#
# The .dex files that we package into the APK consist of a single classes.dex "primary DEX"
# file, and N secondary DEX files. The classes that are put into the primary DEX are those
# that are required at startup, and are specified via `primary_dex_patterns` (classes which
# match one of those patterns are put into the primary DEX).
#
# The primary DEX is always stored in the root directory of the APK as `classes.dex`.
#
# We have 4 different ways of storing our secondary DEX files, which are specified via the
# `dex_compression` attribute:
# 1) `raw` compression. This means that we create `classes2.dex`, `classes3.dex`, ...,
# `classesN.dex` and store each of them in the root directory of the APK.
# 2) `jar` compression. For each secondary DEX file, we put a `classes.dex` entry into a
# JAR file, and store it as an asset at `assets/secondary-program-dex-jars/secondary-I.dex.jar`
# 3) `xz` compression. This is the same as `jar` compression, except that we run `xz` on the
# JAR file to produce `assets/secondary-program-dex-jars/secondary-I.dex.jar.xz`.
# 4) `xzs` compression. We do the same as `jar` compression, then concatenate all the jars
# together and do `xz` compression on the result to produce a single
# `assets/secondary-program-dex-jars/secondary.dex.jar.xzs`.
#
# For all compression types, we also package a `assets/secondary-program-dex-jars/metadata.txt`,
# which has an entry for each secondary DEX file:
# <secondary DEX file name> <sha1 hash of secondary DEX> <canary class name>
#
# A "canary class" is a Java class that we add to every secondary DEX. It is a known class that
# can be used for DEX verification when loading the DEX on a device.
#
# For compression types other than raw, we also include a metadata file per secondary DEX, which
# consists of a single line of the form:
# jar:<size of secondary dex jar (in bytes)> dex:<size of uncompressed dex file (in bytes)>
#
# If an APK has Voltron modules, then we produce a separate group of secondary DEX files for each
# module, and we put them into `assets/<module_name>` instead of `assets/secondary-program-dex-jars`.
# We produce a `metadata.txt` file for each Voltron module.
_DEX_MERGE_OPTIONS = ["--no-desugar", "--no-optimize"]
# Configuration for merging pre-dexed libraries into a primary dex plus
# weight-limited secondary dexes.
SplitDexMergeConfig = record(
    dex_compression = str.type,  # one of "raw", "jar", "xz", "xzs"
    primary_dex_patterns = [str.type],  # class patterns forced into classes.dex
    secondary_dex_weight_limit_bytes = int.type,  # weight budget per secondary dex
)
def _get_dex_compression(ctx: "context") -> str.type:
    # An explicit dex_compression attribute always wins; otherwise default to
    # "jar" when secondary-dex exopackage is enabled, else "raw".
    secondary_dex_exo_enabled = "secondary_dex" in ctx.attrs.exopackage_modes
    compression = ctx.attrs.dex_compression or ("jar" if secondary_dex_exo_enabled else "raw")
    expect(
        compression in ["raw", "jar", "xz", "xzs"],
        "Only 'raw', 'jar', 'xz' and 'xzs' dex compression are supported at this time!",
    )
    return compression
def get_split_dex_merge_config(
        ctx: "context",
        android_toolchain: "AndroidToolchainInfo") -> "SplitDexMergeConfig":
    # Build the split-dex merge configuration, falling back to the toolchain's
    # secondary dex weight limit when the target declares none.
    weight_limit = ctx.attrs.secondary_dex_weight_limit or android_toolchain.secondary_dex_weight_limit
    return SplitDexMergeConfig(
        dex_compression = _get_dex_compression(ctx),
        primary_dex_patterns = ctx.attrs.primary_dex_patterns,
        secondary_dex_weight_limit_bytes = weight_limit,
    )
def get_single_primary_dex(
        ctx: "context",
        android_toolchain: "AndroidToolchainInfo",
        java_library_jars: ["artifact"],
        is_optimized: bool.type) -> "DexFilesInfo":
    # Single-dex build: run d8 once over all jars to produce just classes.dex,
    # with no secondary dexes at all.
    expect(
        not _is_exopackage_enabled_for_secondary_dex(ctx),
        "It doesn't make sense to enable secondary dex exopackage for single dex builds!",
    )
    output_dex_file = ctx.actions.declare_output("classes.dex")
    jar_list_file = ctx.actions.write("jar_to_dex_file.txt", java_library_jars)

    d8_cmd = cmd_args(android_toolchain.d8_command[RunInfo])
    d8_cmd.add(["--output-dex-file", output_dex_file.as_output()])
    d8_cmd.add(["--files-to-dex-list", jar_list_file])
    d8_cmd.add(["--android-jar", android_toolchain.android_jar])
    # The jars are read via the list file, so declare them as hidden inputs.
    d8_cmd.hidden(java_library_jars)
    if not is_optimized:
        d8_cmd.add("--no-optimize")

    ctx.actions.run(d8_cmd, category = "d8", identifier = "{}:{}".format(ctx.label.package, ctx.label.name))

    return DexFilesInfo(
        primary_dex = output_dex_file,
        secondary_dex_dirs = [],
        secondary_dex_exopackage_info = None,
        proguard_text_files_path = None,
        primary_dex_class_names = None,
    )
def get_multi_dex(
        ctx: "context",
        android_toolchain: "AndroidToolchainInfo",
        java_library_jars_to_owners: {"artifact": "target_label"},
        primary_dex_patterns: [str.type],
        proguard_configuration_output_file: ["artifact", None],
        proguard_mapping_output_file: ["artifact", None],
        is_optimized: bool.type,
        apk_module_graph_file: ["artifact", None] = None) -> "DexFilesInfo":
    # Non-pre-dexed path: runs the multi-dex tool over whole jars, once per
    # Voltron module (just the root module when no module graph is given).
    # The root module produces classes.dex plus its secondary dexes; every
    # other module only produces a secondary dex dir under its own subdir.
    expect(
        not _is_exopackage_enabled_for_secondary_dex(ctx),
        "secondary dex exopackage can only be enabled on pre-dexed builds!",
    )
    primary_dex_file = ctx.actions.declare_output("classes.dex")
    primary_dex_class_names = ctx.actions.declare_output("primary_dex_class_names.txt")
    root_module_secondary_dex_output_dir = ctx.actions.declare_output("root_module_secondary_dex_output_dir", dir = True)
    secondary_dex_dir = ctx.actions.declare_output("secondary_dex_output_dir", dir = True)

    # dynamic actions are not valid with no input, but it's easier to use the same code regardless,
    # so just create an empty input.
    inputs = [apk_module_graph_file] if apk_module_graph_file else [ctx.actions.write("empty_artifact_for_multi_dex_dynamic_action", [])]
    outputs = [primary_dex_file, primary_dex_class_names, root_module_secondary_dex_output_dir, secondary_dex_dir]

    def do_multi_dex(ctx: "context", artifacts, outputs):
        # Runs inside a dynamic_output: the module graph artifact (if any) is
        # only readable here, via `artifacts`.
        apk_module_graph_info = get_apk_module_graph_info(ctx, apk_module_graph_file, artifacts) if apk_module_graph_file else get_root_module_only_apk_module_graph_info()
        target_to_module_mapping_function = apk_module_graph_info.target_to_module_mapping_function

        # Bucket the input jars by the Voltron module that owns them.
        module_to_jars = {}
        for java_library_jar, owner in java_library_jars_to_owners.items():
            module = target_to_module_mapping_function(str(owner))
            module_to_jars.setdefault(module, []).append(java_library_jar)

        secondary_dex_dir_srcs = {}
        for module, jars in module_to_jars.items():
            multi_dex_cmd = cmd_args(android_toolchain.multi_dex_command[RunInfo])
            if is_root_module(module):
                # Only the root module gets a primary dex (and the list of
                # class names that went into it).
                multi_dex_cmd.add("--primary-dex", outputs[primary_dex_file].as_output())
                multi_dex_cmd.add("--primary-dex-patterns-path", ctx.actions.write("primary_dex_patterns", primary_dex_patterns))
                multi_dex_cmd.add("--primary-dex-class-names", outputs[primary_dex_class_names].as_output())
                multi_dex_cmd.add("--secondary-dex-output-dir", outputs[root_module_secondary_dex_output_dir].as_output())
            else:
                # Non-root modules write into their own output dir, which is
                # symlinked into the combined secondary_dex_dir after the loop.
                secondary_dex_dir_for_module = ctx.actions.declare_output("secondary_dex_output_dir_for_module_{}".format(module), dir = True)
                secondary_dex_subdir = secondary_dex_dir_for_module.project(_get_secondary_dex_subdir(module))
                secondary_dex_dir_srcs[_get_secondary_dex_subdir(module)] = secondary_dex_subdir
                multi_dex_cmd.add("--secondary-dex-output-dir", secondary_dex_dir_for_module.as_output())
                multi_dex_cmd.add("--module-deps", ctx.actions.write("module_deps_for_{}".format(module), apk_module_graph_info.module_to_module_deps_function(module)))

            multi_dex_cmd.add("--module", module)
            multi_dex_cmd.add("--canary-class-name", apk_module_graph_info.module_to_canary_class_name_function(module))

            jar_to_dex_file = ctx.actions.write("jars_to_dex_file_for_module_{}.txt".format(module), jars)
            multi_dex_cmd.add("--files-to-dex-list", jar_to_dex_file)
            # Jars are read via the list file, so declare them as hidden inputs.
            multi_dex_cmd.hidden(jars)

            multi_dex_cmd.add("--android-jar", android_toolchain.android_jar)
            if not is_optimized:
                multi_dex_cmd.add("--no-optimize")

            if proguard_configuration_output_file:
                multi_dex_cmd.add("--proguard-configuration-file", proguard_configuration_output_file)
                multi_dex_cmd.add("--proguard-mapping-file", proguard_mapping_output_file)

            multi_dex_cmd.add("--compression", _get_dex_compression(ctx))
            multi_dex_cmd.add("--xz-compression-level", str(ctx.attrs.xz_compression_level))
            if ctx.attrs.minimize_primary_dex_size:
                multi_dex_cmd.add("--minimize-primary-dex")

            ctx.actions.run(multi_dex_cmd, category = "multi_dex", identifier = "{}:{}_module_{}".format(ctx.label.package, ctx.label.name, module))

        ctx.actions.symlinked_dir(outputs[secondary_dex_dir], secondary_dex_dir_srcs)

    ctx.actions.dynamic_output(dynamic = inputs, inputs = [], outputs = outputs, f = do_multi_dex)

    return DexFilesInfo(
        primary_dex = primary_dex_file,
        secondary_dex_dirs = [root_module_secondary_dex_output_dir, secondary_dex_dir],
        secondary_dex_exopackage_info = None,
        proguard_text_files_path = None,
        primary_dex_class_names = primary_dex_class_names,
    )
def merge_to_single_dex(
        ctx: "context",
        android_toolchain: "AndroidToolchainInfo",
        pre_dexed_libs: ["DexLibraryInfo"]) -> "DexFilesInfo":
    # Pre-dexed single-dex build: merge all of the per-library dex files into
    # one classes.dex; no secondary dexes.
    expect(
        not _is_exopackage_enabled_for_secondary_dex(ctx),
        "It doesn't make sense to enable secondary dex exopackage for single dex builds!",
    )
    merged_dex = ctx.actions.declare_output("classes.dex")
    dex_list_file = ctx.actions.declare_output("pre_dexed_artifacts_to_dex_file.txt")
    dex_artifacts = [lib.dex for lib in pre_dexed_libs if lib.dex != None]
    _merge_dexes(ctx, android_toolchain, merged_dex, dex_artifacts, dex_list_file)
    return DexFilesInfo(
        primary_dex = merged_dex,
        secondary_dex_dirs = [],
        secondary_dex_exopackage_info = None,
        proguard_text_files_path = None,
        primary_dex_class_names = None,
    )
# A pre-dexed library paired with the exact class names from it that go into
# a particular output dex.
DexInputWithSpecifiedClasses = record(
    lib = "DexLibraryInfo",
    dex_class_names = [str.type],
)

# A pre-dexed library paired with the output of the filter_dex step: a file
# holding the library's weight estimate and the class names matching the
# primary-dex patterns.
DexInputWithClassNamesFile = record(
    lib = "DexLibraryInfo",
    weight_estimate_and_filtered_class_names_file = "artifact",
)
# When using jar compression, the secondary dex directory consists of N secondary dex jars, each
# of which has a corresponding .meta file (the secondary_dex_metadata_file) containing a single
# line of the form:
# jar:<size of secondary dex jar (in bytes)> dex:<size of uncompressed dex file (in bytes)>
#
# It also contains a metadata.txt file, which consists of N lines, one for each secondary dex
# jar. Those lines consist of:
# <secondary dex file name> <sha1 hash of secondary dex> <canary class>
#
# We write the line that needs to be added to metadata.txt for this secondary dex jar to
# secondary_dex_metadata_line, and we use the secondary_dex_canary_class_name for the
# <canary class>.
#
# When we have finished building all of the secondary dexes, we read each of the
# secondary_dex_metadata_line artifacts and write them to a single metadata.txt file.
# We do that for raw compression too, since it also has a metadata.txt file.
SecondaryDexMetadataConfig = record(
    secondary_dex_compression = str.type,  # "jar" or "raw" (see the helpers below)
    secondary_dex_metadata_path = [str.type, None],  # path of the per-dex .meta file; None for raw
    secondary_dex_metadata_file = ["artifact", None],  # the per-dex .meta artifact; None for raw
    secondary_dex_metadata_line = "artifact",  # this dex's line for the module's metadata.txt
    secondary_dex_canary_class_name = str.type,  # fully-qualified canary class for this dex
)
def _get_secondary_dex_jar_metadata_config(
        actions: "actions",
        secondary_dex_path: str.type,
        module: str.type,
        module_to_canary_class_name_function: "function",
        index: int.type) -> SecondaryDexMetadataConfig.type:
    # Metadata for a jar-compressed secondary dex: a sibling ".meta" file plus
    # the line that is later folded into the module's metadata.txt.
    dex_number = index + 1
    meta_path = secondary_dex_path + ".meta"
    return SecondaryDexMetadataConfig(
        secondary_dex_compression = "jar",
        secondary_dex_metadata_path = meta_path,
        secondary_dex_metadata_file = actions.declare_output(meta_path),
        secondary_dex_metadata_line = actions.declare_output("metadata_line_artifacts/{}/{}".format(module, dex_number)),
        secondary_dex_canary_class_name = _get_fully_qualified_canary_class_name(module, module_to_canary_class_name_function, dex_number),
    )
def _get_secondary_dex_raw_metadata_config(
        actions: "actions",
        module: str.type,
        module_to_canary_class_name_function: "function",
        index: int.type) -> SecondaryDexMetadataConfig.type:
    # Metadata for a raw secondary dex: no per-dex .meta file, only the line
    # that is later folded into the module's metadata.txt.
    dex_number = index + 1
    return SecondaryDexMetadataConfig(
        secondary_dex_compression = "raw",
        secondary_dex_metadata_path = None,
        secondary_dex_metadata_file = None,
        secondary_dex_metadata_line = actions.declare_output("metadata_line_artifacts/{}/{}".format(module, dex_number)),
        secondary_dex_canary_class_name = _get_fully_qualified_canary_class_name(module, module_to_canary_class_name_function, dex_number),
    )
def _get_filter_dex_batch_size() -> int.type:
    # Number of pre-dexed libraries handled per filter_dex action invocation.
    return 100
def _filter_pre_dexed_libs(
        actions: "actions",
        android_toolchain: "AndroidToolchainInfo",
        primary_dex_patterns_file: "artifact",
        pre_dexed_libs: ["DexLibraryInfo"],
        batch_number: int.type) -> [DexInputWithClassNamesFile.type]:
    # One batched filter_dex run: for each library, produce a file holding its
    # weight estimate and the class names matching the primary-dex patterns.
    libs_with_outputs = []
    for lib in pre_dexed_libs:
        class_names = lib.class_names
        output_id = "{}_{}_{}".format(class_names.owner.package, class_names.owner.name, class_names.short_path)
        output_file = actions.declare_output("primary_dex_class_names_for_{}".format(output_id))
        libs_with_outputs.append(DexInputWithClassNamesFile(
            lib = lib,
            weight_estimate_and_filtered_class_names_file = output_file,
        ))

    filter_dex_cmd = cmd_args([
        android_toolchain.filter_dex_class_names[RunInfo],
        "--primary-dex-patterns",
        primary_dex_patterns_file,
        "--class-names",
        [entry.lib.class_names for entry in libs_with_outputs],
        "--weight-estimates",
        [entry.lib.weight_estimate for entry in libs_with_outputs],
        "--output",
        [entry.weight_estimate_and_filtered_class_names_file.as_output() for entry in libs_with_outputs],
    ])
    actions.run(filter_dex_cmd, category = "filter_dex", identifier = "batch_{}".format(batch_number))
    return libs_with_outputs
_SortedPreDexedInputs = record(
module = str.type,
primary_dex_inputs = [DexInputWithSpecifiedClasses.type],
secondary_dex_inputs = [[DexInputWithSpecifiedClasses.type]],
)
def merge_to_split_dex(
ctx: "context",
android_toolchain: "AndroidToolchainInfo",
pre_dexed_libs: ["DexLibraryInfo"],
split_dex_merge_config: "SplitDexMergeConfig",
apk_module_graph_file: ["artifact", None] = None) -> "DexFilesInfo":
is_exopackage_enabled_for_secondary_dex = _is_exopackage_enabled_for_secondary_dex(ctx)
if is_exopackage_enabled_for_secondary_dex:
expect(
split_dex_merge_config.dex_compression == "jar",
"Exopackage can only be enabled for secondary dexes when the dex compression is 'jar', but the dex compression is '{}'".format(split_dex_merge_config.dex_compression),
)
primary_dex_patterns_file = ctx.actions.write("primary_dex_patterns_file", split_dex_merge_config.primary_dex_patterns)
pre_dexed_lib_with_class_names_files = []
batch_size = _get_filter_dex_batch_size()
for (batch_number, start_index) in enumerate(range(0, len(pre_dexed_libs), batch_size)):
end_index = min(start_index + batch_size, len(pre_dexed_libs))
pre_dexed_lib_with_class_names_files.extend(
_filter_pre_dexed_libs(
ctx.actions,
android_toolchain,
primary_dex_patterns_file,
pre_dexed_libs[start_index:end_index],
batch_number,
),
)
input_artifacts = [
input.weight_estimate_and_filtered_class_names_file
for input in pre_dexed_lib_with_class_names_files
] + ([apk_module_graph_file] if apk_module_graph_file else [])
primary_dex_artifact_list = ctx.actions.declare_output("pre_dexed_artifacts_for_primary_dex.txt")
primary_dex_output = ctx.actions.declare_output("classes.dex")
primary_dex_class_names_list = ctx.actions.declare_output("primary_dex_class_names_list.txt")
root_module_secondary_dexes_dir = ctx.actions.declare_output("root_module_secondary_dexes_dir", dir = True)
root_module_secondary_dexes_subdir = root_module_secondary_dexes_dir.project(_get_secondary_dex_subdir(ROOT_MODULE))
root_module_secondary_dexes_metadata = root_module_secondary_dexes_dir.project(paths.join(_get_secondary_dex_subdir(ROOT_MODULE), "metadata.txt"))
non_root_module_secondary_dexes_dir = ctx.actions.declare_output("non_root_module_secondary_dexes_dir", dir = True)
outputs = [primary_dex_output, primary_dex_artifact_list, primary_dex_class_names_list, root_module_secondary_dexes_dir, non_root_module_secondary_dexes_dir]
def merge_pre_dexed_libs(ctx: "context", artifacts, outputs):
apk_module_graph_info = get_apk_module_graph_info(ctx, apk_module_graph_file, artifacts) if apk_module_graph_file else get_root_module_only_apk_module_graph_info()
module_to_canary_class_name_function = apk_module_graph_info.module_to_canary_class_name_function
sorted_pre_dexed_inputs = _sort_pre_dexed_files(
ctx,
artifacts,
pre_dexed_lib_with_class_names_files,
split_dex_merge_config,
get_module_from_target = apk_module_graph_info.target_to_module_mapping_function,
module_to_canary_class_name_function = module_to_canary_class_name_function,
)
root_module_secondary_dexes_for_symlinking = {}
non_root_module_secondary_dexes_for_symlinking = {}
metadata_line_artifacts_by_module = {}
metadata_dot_txt_files_by_module = {}
for sorted_pre_dexed_input in sorted_pre_dexed_inputs:
module = sorted_pre_dexed_input.module
secondary_dexes_for_symlinking = root_module_secondary_dexes_for_symlinking if is_root_module(module) else non_root_module_secondary_dexes_for_symlinking
primary_dex_inputs = sorted_pre_dexed_input.primary_dex_inputs
pre_dexed_artifacts = [primary_dex_input.lib.dex for primary_dex_input in primary_dex_inputs if primary_dex_input.lib.dex]
if pre_dexed_artifacts:
expect(is_root_module(module), "module {} should not have a primary dex!".format(module))
ctx.actions.write(
outputs[primary_dex_class_names_list].as_output(),
flatten([primary_dex_input.dex_class_names for primary_dex_input in primary_dex_inputs]),
)
_merge_dexes(
ctx,
android_toolchain,
outputs[primary_dex_output],
pre_dexed_artifacts,
outputs[primary_dex_artifact_list],
class_names_to_include = primary_dex_class_names_list,
)
else:
expect(
not is_root_module(module),
"No primary dex classes were specified! Please add primary_dex_patterns to ensure that at least one class exists in the primary dex.",
)
secondary_dex_inputs = sorted_pre_dexed_input.secondary_dex_inputs
raw_secondary_dexes_for_compressing = {}
for i in range(len(secondary_dex_inputs)):
if split_dex_merge_config.dex_compression == "jar" or split_dex_merge_config.dex_compression == "raw":
if split_dex_merge_config.dex_compression == "jar":
secondary_dex_path = _get_jar_secondary_dex_path(i, module)
secondary_dex_metadata_config = _get_secondary_dex_jar_metadata_config(ctx.actions, secondary_dex_path, module, module_to_canary_class_name_function, i)
secondary_dexes_for_symlinking[secondary_dex_metadata_config.secondary_dex_metadata_path] = secondary_dex_metadata_config.secondary_dex_metadata_file
else:
secondary_dex_path = _get_raw_secondary_dex_path(i, module)
secondary_dex_metadata_config = _get_secondary_dex_raw_metadata_config(ctx.actions, module, module_to_canary_class_name_function, i)
secondary_dex_output = ctx.actions.declare_output(secondary_dex_path)
secondary_dexes_for_symlinking[secondary_dex_path] = secondary_dex_output
metadata_line_artifacts_by_module.setdefault(module, []).append(secondary_dex_metadata_config.secondary_dex_metadata_line)
else:
secondary_dex_name = _get_raw_secondary_dex_name(i, module)
secondary_dex_output = ctx.actions.declare_output("{}/{}".format(module, secondary_dex_name))
raw_secondary_dexes_for_compressing[secondary_dex_name] = secondary_dex_output
secondary_dex_metadata_config = None
secondary_dex_artifact_list = ctx.actions.declare_output("pre_dexed_artifacts_for_secondary_dex_{}_for_module_{}.txt".format(i + 2, module))
secondary_dex_class_list = ctx.actions.write(
"class_list_for_secondary_dex_{}_for_module_{}.txt".format(i + 2, module),
flatten([secondary_dex_input.dex_class_names for secondary_dex_input in secondary_dex_inputs[i]]),
)
pre_dexed_artifacts = [secondary_dex_input.lib.dex for secondary_dex_input in secondary_dex_inputs[i] if secondary_dex_input.lib.dex]
_merge_dexes(
ctx,
android_toolchain,
secondary_dex_output,
pre_dexed_artifacts,
secondary_dex_artifact_list,
class_names_to_include = secondary_dex_class_list,
secondary_dex_metadata_config = secondary_dex_metadata_config,
)
if split_dex_merge_config.dex_compression == "jar" or split_dex_merge_config.dex_compression == "raw":
metadata_dot_txt_path = "{}/metadata.txt".format(_get_secondary_dex_subdir(module))
metadata_dot_txt_file = ctx.actions.declare_output(metadata_dot_txt_path)
secondary_dexes_for_symlinking[metadata_dot_txt_path] = metadata_dot_txt_file
metadata_dot_txt_files_by_module[module] = metadata_dot_txt_file
else:
raw_secondary_dexes_dir = ctx.actions.symlinked_dir("raw_secondary_dexes_dir_for_module_{}".format(module), raw_secondary_dexes_for_compressing)
secondary_dex_dir_for_module = ctx.actions.declare_output("secondary_dexes_dir_for_{}".format(module), dir = True)
secondary_dex_subdir = secondary_dex_dir_for_module.project(_get_secondary_dex_subdir(module))
multi_dex_cmd = cmd_args(android_toolchain.multi_dex_command[RunInfo])
multi_dex_cmd.add("--secondary-dex-output-dir", secondary_dex_dir_for_module.as_output())
multi_dex_cmd.add("--raw-secondary-dexes-dir", raw_secondary_dexes_dir)
multi_dex_cmd.add("--compression", _get_dex_compression(ctx))
multi_dex_cmd.add("--xz-compression-level", str(ctx.attrs.xz_compression_level))
multi_dex_cmd.add("--module", module)
multi_dex_cmd.add("--canary-class-name", module_to_canary_class_name_function(module))
if not is_root_module(module):
multi_dex_cmd.add("--module-deps", ctx.actions.write("module_deps_for_{}".format(module), apk_module_graph_info.module_to_module_deps_function(module)))
ctx.actions.run(multi_dex_cmd, category = "multi_dex_from_raw_dexes", identifier = "{}:{}_module_{}".format(ctx.label.package, ctx.label.name, module))
secondary_dexes_for_symlinking[_get_secondary_dex_subdir(module)] = secondary_dex_subdir
if metadata_dot_txt_files_by_module:
def write_metadata_dot_txts(ctx: "context", artifacts, outputs):
for voltron_module, metadata_dot_txt in metadata_dot_txt_files_by_module.items():
metadata_line_artifacts = metadata_line_artifacts_by_module[voltron_module]
expect(metadata_line_artifacts != None, "Should have metadata lines!")
metadata_lines = [".id {}".format(voltron_module)]
metadata_lines.extend([".requires {}".format(module_dep) for module_dep in apk_module_graph_info.module_to_module_deps_function(voltron_module)])
if split_dex_merge_config.dex_compression == "raw" and is_root_module(voltron_module):
metadata_lines.append(".root_relative")
for metadata_line_artifact in metadata_line_artifacts:
metadata_lines.append(artifacts[metadata_line_artifact].read_string().strip())
ctx.actions.write(outputs[metadata_dot_txt], metadata_lines)
ctx.actions.dynamic_output(dynamic = flatten(metadata_line_artifacts_by_module.values()), inputs = [], outputs = metadata_dot_txt_files_by_module.values(), f = write_metadata_dot_txts)
ctx.actions.symlinked_dir(
outputs[root_module_secondary_dexes_dir],
root_module_secondary_dexes_for_symlinking,
)
ctx.actions.symlinked_dir(
outputs[non_root_module_secondary_dexes_dir],
non_root_module_secondary_dexes_for_symlinking,
)
ctx.actions.dynamic_output(dynamic = input_artifacts, inputs = [], outputs = outputs, f = merge_pre_dexed_libs)
if is_exopackage_enabled_for_secondary_dex:
secondary_dex_dirs = [non_root_module_secondary_dexes_dir]
secondary_dex_exopackage_info = ExopackageDexInfo(
metadata = root_module_secondary_dexes_metadata,
directory = root_module_secondary_dexes_subdir,
)
else:
secondary_dex_dirs = [root_module_secondary_dexes_dir, non_root_module_secondary_dexes_dir]
secondary_dex_exopackage_info = None
return DexFilesInfo(
primary_dex = primary_dex_output,
secondary_dex_dirs = secondary_dex_dirs,
secondary_dex_exopackage_info = secondary_dex_exopackage_info,
proguard_text_files_path = None,
primary_dex_class_names = primary_dex_class_names_list,
)
def _merge_dexes(
        ctx: "context",
        android_toolchain: "AndroidToolchainInfo",
        output_dex_file: "artifact",
        pre_dexed_artifacts: ["artifact"],
        pre_dexed_artifacts_file: "artifact",
        class_names_to_include: ["artifact", None] = None,
        secondary_output_dex_file: ["artifact", None] = None,
        secondary_dex_metadata_config: [SecondaryDexMetadataConfig.type, None] = None):
    # Runs d8 to merge `pre_dexed_artifacts` into a single output dex file.
    merge_cmd = cmd_args(android_toolchain.d8_command[RunInfo])
    merge_cmd.add(["--output-dex-file", output_dex_file.as_output()])

    # Pass the input dexes via a list file to keep the command line short; the
    # artifacts themselves are attached as hidden inputs so they get materialized.
    files_to_dex_list = ctx.actions.write(pre_dexed_artifacts_file.as_output(), pre_dexed_artifacts)
    merge_cmd.add(["--files-to-dex-list", files_to_dex_list])
    merge_cmd.hidden(pre_dexed_artifacts)

    merge_cmd.add(["--android-jar", android_toolchain.android_jar])
    merge_cmd.add(_DEX_MERGE_OPTIONS)

    if class_names_to_include:
        merge_cmd.add(["--primary-dex-class-names-path", class_names_to_include])

    if secondary_output_dex_file:
        merge_cmd.add(["--secondary-output-dex-file", secondary_output_dex_file.as_output()])

    if secondary_dex_metadata_config:
        # Secondary dexes may additionally emit compression/metadata outputs.
        merge_cmd.add(["--secondary-dex-compression", secondary_dex_metadata_config.secondary_dex_compression])
        if secondary_dex_metadata_config.secondary_dex_metadata_file:
            merge_cmd.add(["--secondary-dex-metadata-file", secondary_dex_metadata_config.secondary_dex_metadata_file.as_output()])
        merge_cmd.add(["--secondary-dex-metadata-line", secondary_dex_metadata_config.secondary_dex_metadata_line.as_output()])
        merge_cmd.add(["--secondary-dex-canary-class-name", secondary_dex_metadata_config.secondary_dex_canary_class_name])

    ctx.actions.run(
        merge_cmd,
        category = "d8",
        identifier = "{}:{} {}".format(ctx.label.package, ctx.label.name, output_dex_file.short_path),
    )
def _sort_pre_dexed_files(
        ctx: "context",
        artifacts,
        pre_dexed_lib_with_class_names_files: ["DexInputWithClassNamesFile"],
        split_dex_merge_config: "SplitDexMergeConfig",
        get_module_from_target: "function",
        module_to_canary_class_name_function: "function") -> [_SortedPreDexedInputs.type]:
    # Buckets every pre-dexed library's classes into the primary dex and a list
    # of secondary dexes, grouped per Voltron module. Secondary dexes are filled
    # greedily up to the configured weight limit; each new secondary dex starts
    # with a generated canary class.
    sorted_pre_dexed_inputs_map = {}
    current_secondary_dex_size_map = {}
    current_secondary_dex_inputs_map = {}
    for pre_dexed_lib_with_class_names_file in pre_dexed_lib_with_class_names_files:
        pre_dexed_lib = pre_dexed_lib_with_class_names_file.lib
        module = get_module_from_target(str(pre_dexed_lib.dex.owner.raw_target()))

        # The upstream file is ";"-separated:
        # "<weight>;<comma-separated primary classes>;<comma-separated secondary classes>"
        weight_estimate_string, primary_dex_data, secondary_dex_data = artifacts[pre_dexed_lib_with_class_names_file.weight_estimate_and_filtered_class_names_file].read_string().split(";")
        primary_dex_class_names = primary_dex_data.split(",") if primary_dex_data else []
        secondary_dex_class_names = secondary_dex_data.split(",") if secondary_dex_data else []
        module_pre_dexed_inputs = sorted_pre_dexed_inputs_map.setdefault(module, _SortedPreDexedInputs(
            module = module,
            primary_dex_inputs = [],
            secondary_dex_inputs = [],
        ))
        primary_dex_inputs = module_pre_dexed_inputs.primary_dex_inputs
        secondary_dex_inputs = module_pre_dexed_inputs.secondary_dex_inputs

        if len(primary_dex_class_names) > 0:
            # Only the root module may contribute classes to the primary dex.
            expect(
                is_root_module(module),
                "Non-root modules should not have anything that belongs in the primary dex, " +
                "but {} is assigned to module {} and has the following class names in the primary dex: {}\n".format(
                    pre_dexed_lib.dex.owner,
                    module,
                    "\n".join(primary_dex_class_names),
                ),
            )
            primary_dex_inputs.append(
                DexInputWithSpecifiedClasses(lib = pre_dexed_lib, dex_class_names = primary_dex_class_names),
            )

        if len(secondary_dex_class_names) > 0:
            weight_estimate = int(weight_estimate_string)
            current_secondary_dex_size = current_secondary_dex_size_map.get(module, 0)
            # Start a new secondary dex for this module once adding this library
            # would exceed the weight limit.
            if current_secondary_dex_size + weight_estimate > split_dex_merge_config.secondary_dex_weight_limit_bytes:
                current_secondary_dex_size = 0
                current_secondary_dex_inputs_map[module] = []

            current_secondary_dex_inputs = current_secondary_dex_inputs_map.setdefault(module, [])
            if len(current_secondary_dex_inputs) == 0:
                # A fresh secondary dex always begins with a canary class
                # (1-based index) used during secondary dex verification.
                canary_class_dex_input = _create_canary_class(
                    ctx,
                    len(secondary_dex_inputs) + 1,
                    module,
                    module_to_canary_class_name_function,
                    ctx.attrs._dex_toolchain[DexToolchainInfo],
                )
                current_secondary_dex_inputs.append(canary_class_dex_input)
                # The same list object is appended here and kept in
                # current_secondary_dex_inputs_map, so later appends below keep
                # landing in this secondary dex.
                secondary_dex_inputs.append(current_secondary_dex_inputs)

            current_secondary_dex_size_map[module] = current_secondary_dex_size + weight_estimate
            current_secondary_dex_inputs.append(
                DexInputWithSpecifiedClasses(lib = pre_dexed_lib, dex_class_names = secondary_dex_class_names),
            )

    return sorted_pre_dexed_inputs_map.values()
def _get_raw_secondary_dex_name(index: int.type, module: str.type) -> str.type:
    # File name of the (0-indexed) raw secondary dex within a module.
    # Root module begins at 2 (primary classes.dex is 1)
    # Non-root module begins at 1 (classes.dex)
    if is_root_module(module):
        return "classes{}.dex".format(index + 2)
    elif index == 0:
        # No placeholder here, so the former `.format(module)` was a no-op.
        return "classes.dex"
    else:
        # BUG FIX: this previously called "classes{}.dex".format(module, index + 1).
        # str.format silently ignores extra positional arguments, so the single
        # placeholder was filled with `module`, yielding the same
        # "classes<module>.dex" name for every index >= 1 (a collision). The
        # numeric suffix must come from the index.
        return "classes{}.dex".format(index + 1)
def _get_raw_secondary_dex_path(index: int.type, module: str.type):
    # Relative path of a raw secondary dex; non-root modules live under
    # assets/<module>/.
    dex_file_name = _get_raw_secondary_dex_name(index, module)
    if not is_root_module(module):
        return "assets/{}/{}".format(module, dex_file_name)
    return dex_file_name
def _get_jar_secondary_dex_path(index: int.type, module: str.type):
    # Jar-compressed secondary dexes are named <prefix>-<1-based index>.dex.jar
    # inside the module's secondary dex subdir.
    if is_root_module(module):
        name_prefix = "secondary"
    else:
        name_prefix = module
    return "{}/{}-{}.dex.jar".format(_get_secondary_dex_subdir(module), name_prefix, index + 1)
def _get_secondary_dex_subdir(module: str.type):
    # Root-module secondary dexes share a fixed assets subdir; every other
    # Voltron module gets its own assets/<module> directory.
    subdir_name = "secondary-program-dex-jars" if is_root_module(module) else module
    return "assets/{}".format(subdir_name)
# We create "canary" classes and add them to each secondary dex jar to ensure each jar has a class
# that can be safely loaded on any system. This class is used during secondary dex verification.
# Each template takes (<canary class name prefix>, <1-based secondary dex index>).
_CANARY_FULLY_QUALIFIED_CLASS_NAME_TEMPLATE = "{}.dex{}.Canary"
_CANARY_FILE_NAME_TEMPLATE = "canary_classes/{}/dex{}/Canary.java"
_CANARY_CLASS_PACKAGE_TEMPLATE = "package {}.dex{};\n"
# Note: the trailing "{}" here is the empty Java interface body, not a format placeholder.
_CANARY_CLASS_INTERFACE_DEFINITION = "public interface Canary {}"
def _create_canary_class(
        ctx: "context",
        index: int.type,
        module: str.type,
        module_to_canary_class_name_function: "function",
        dex_toolchain: DexToolchainInfo.type) -> DexInputWithSpecifiedClasses.type:
    # Generates, compiles, and dexes a trivial Canary interface for the
    # (1-based) `index`-th secondary dex of `module`.
    class_name_prefix = module_to_canary_class_name_function(module)
    java_source = ctx.actions.write(
        _CANARY_FILE_NAME_TEMPLATE.format(class_name_prefix, index),
        [_CANARY_CLASS_PACKAGE_TEMPLATE.format(class_name_prefix, index), _CANARY_CLASS_INTERFACE_DEFINITION],
    )
    jar_output = ctx.actions.declare_output("canary_classes/{}/canary_jar_{}.jar".format(class_name_prefix, index))
    compile_to_jar(ctx, [java_source], output = jar_output, actions_identifier = "{}_canary_class{}".format(class_name_prefix, index))
    dexed_canary = get_dex_produced_from_java_library(ctx, dex_toolchain = dex_toolchain, jar_to_dex = jar_output)

    # The class-name entry uses .class-file path form (dots -> slashes).
    canary_class_entry = _get_fully_qualified_canary_class_name(module, module_to_canary_class_name_function, index).replace(".", "/") + ".class"
    return DexInputWithSpecifiedClasses(
        lib = dexed_canary,
        dex_class_names = [canary_class_entry],
    )
def _get_fully_qualified_canary_class_name(module: str.type, module_to_canary_class_name_function: "function", index: int.type) -> str.type:
    # e.g. "<prefix>.dex<index>.Canary"
    return _CANARY_FULLY_QUALIFIED_CLASS_NAME_TEMPLATE.format(module_to_canary_class_name_function(module), index)
def _is_exopackage_enabled_for_secondary_dex(ctx: "context") -> bool.type:
    # Targets without an `exopackage_modes` attribute are treated as having none.
    exopackage_modes = getattr(ctx.attrs, "exopackage_modes", [])
    return "secondary_dex" in exopackage_modes

View File

@ -0,0 +1,27 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
# Bit flags identifying individual exopackage features; each enabled mode
# contributes its flag to the combined value.
SECONDARY_DEX = 1
NATIVE_LIBRARY = 2
RESOURCES = 4
MODULES = 8
ARCH64 = 16

def get_exopackage_flags(exopackage_modes: [str.type]) -> int.type:
    # Folds the enabled mode names into a single integer bitmask; names not in
    # the table contribute nothing.
    flag_by_mode = {
        "secondary_dex": SECONDARY_DEX,
        "native_library": NATIVE_LIBRARY,
        "resources": RESOURCES,
        "modules": MODULES,
        "arch64": ARCH64,
    }
    total = 0
    for mode, flag in flag_by_mode.items():
        if mode in exopackage_modes:
            total += flag
    return total

View File

@ -0,0 +1,57 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//java:java_toolchain.bzl", "JavaToolchainInfo")
load(":android_toolchain.bzl", "AndroidToolchainInfo")
# Carries the .aidl source files of a gen_aidl rule so that dependent gen_aidl
# rules can make them available to the aidl tool's include path search.
_AidlSourceInfo = provider(fields = [
    "srcs",
])
def gen_aidl_impl(ctx: "context") -> ["provider"]:
    # Runs the aidl tool over ctx.attrs.aidl and packages the generated Java
    # sources into a .src.zip that other rules can consume as srcs.
    android_toolchain = ctx.attrs._android_toolchain[AndroidToolchainInfo]
    aidl_cmd = cmd_args(android_toolchain.aidl)
    aidl_cmd.add("-p", android_toolchain.framework_aidl_file)
    aidl_cmd.add("-I", ctx.attrs.import_path)
    for path in ctx.attrs.import_paths:
        aidl_cmd.add("-I", path)

    # We need the `aidl_srcs` files - otherwise the search on the `import_path` won't find anything.
    aidl_cmd.hidden(ctx.attrs.aidl_srcs)

    # Allow gen_aidl rules to depend on other gen_aidl rules, and make the source files from the
    # deps accessible in this context. This is an alternative to adding dependent files in
    # aidl_srcs.
    dep_srcs = []
    for dep in ctx.attrs.deps:
        source_info = dep.get(_AidlSourceInfo)
        if source_info != None:
            dep_srcs += source_info.srcs
        else:
            warning("`{}` dependency `{}` is not a `gen_aidl` rule and will be ignored".format(ctx.label, dep.label))
    aidl_cmd.hidden(dep_srcs)

    aidl_out = ctx.actions.declare_output("aidl_output", dir = True)
    aidl_cmd.add("-o", aidl_out.as_output())
    aidl_cmd.add(ctx.attrs.aidl)
    ctx.actions.run(aidl_cmd, category = "aidl")

    # Put the generated Java files into a zip file to be used as srcs to other rules.
    # `jar -cfM` creates the archive without adding a manifest.
    java_toolchain = ctx.attrs._java_toolchain[JavaToolchainInfo]
    jar_cmd = cmd_args(java_toolchain.jar)
    jar_cmd.add("-cfM")
    out = ctx.actions.declare_output("{}_aidl_java_output.src.zip".format(ctx.attrs.name))
    jar_cmd.add(out.as_output())
    jar_cmd.add(aidl_out)
    ctx.actions.run(jar_cmd, category = "aidl_jar")

    return [
        DefaultInfo(default_output = out),
        # Expose our own sources plus transitive dep sources to downstream
        # gen_aidl rules.
        _AidlSourceInfo(srcs = [ctx.attrs.aidl] + ctx.attrs.aidl_srcs + dep_srcs),
    ]

View File

@ -0,0 +1,15 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
# Bounds of the Android SDK levels for which min_sdk_version constraint values
# are generated.
_MIN_SDK_VERSION = 19
_MAX_SDK_VERSION = 33

def get_min_sdk_version_constraint_value_name(min_sdk: int.type) -> str.type:
    # Name of the generated constraint_value for a given min_sdk level.
    return "min_sdk_version_{}".format(min_sdk)

def get_min_sdk_version_range() -> range.type:
    # NOTE(review): range() excludes the upper bound, so level 33 itself gets no
    # constraint value — confirm this is intended.
    return range(_MIN_SDK_VERSION, _MAX_SDK_VERSION)

View File

@ -0,0 +1,38 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load(
"@prelude//android:android_providers.bzl",
"PrebuiltNativeLibraryDir",
"merge_android_packageable_info",
)
def prebuilt_native_library_impl(ctx: "context") -> ["provider"]:
    # `is_asset` and `has_wrap_script` route the libs differently, so they are
    # mutually exclusive.
    if ctx.attrs.is_asset and ctx.attrs.has_wrap_script:
        fail("Cannot use `is_asset` and `has_wrap_script` in the same rule")

    native_lib_dir = PrebuiltNativeLibraryDir(
        raw_target = ctx.label.raw_target(),
        dir = ctx.attrs.native_libs,
        for_primary_apk = ctx.attrs.has_wrap_script,
        is_asset = ctx.attrs.is_asset,
    )
    packageable_info = merge_android_packageable_info(
        ctx.label,
        ctx.actions,
        ctx.attrs.deps,
        prebuilt_native_library_dir = native_lib_dir,
    )

    # Buck1 copies the input directory and returns it as the output path. We don't
    # copy; we could just return the input directory itself as the output path, but
    # we're avoiding that (due to potential confusion from the output path being an
    # input directory) until we have an actual need for prebuilt_native_library
    # having an output path.
    return [
        DefaultInfo(),
        packageable_info,
    ]

View File

@ -0,0 +1,55 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//android:android_toolchain.bzl", "AndroidToolchainInfo")
load("@prelude//java/utils:java_utils.bzl", "get_path_separator")
load("@prelude//utils:utils.bzl", "expect")
def get_preprocessed_java_classes(ctx: "context", input_jars = {"artifact": "target_label"}) -> {"artifact": "target_label"}:
    # Runs the user-supplied `preprocess_java_classes_bash` script over all input
    # jars and returns the preprocessed jars mapped back to their owning targets.
    # The script reads jars from $IN_JARS_DIR and is expected to write same-named
    # jars into $OUT_JARS_DIR.
    #
    # With allow_args = True, write() returns (script, macro_artifacts).
    sh_script, _ = ctx.actions.write(
        "preprocessed_java_classes/script.sh",
        cmd_args(ctx.attrs.preprocess_java_classes_bash),
        is_executable = True,
        allow_args = True,
    )
    preprocess_cmd = cmd_args(["/bin/bash", sh_script])
    # The raw command and the outputs of its deps are only referenced from
    # inside the script, so attach them as hidden inputs to materialize them.
    preprocess_cmd.hidden(cmd_args(ctx.attrs.preprocess_java_classes_bash))
    for dep in ctx.attrs.preprocess_java_classes_deps:
        preprocess_cmd.hidden(dep[DefaultInfo].default_outputs + dep[DefaultInfo].other_outputs)

    input_srcs = {}
    output_jars = {}
    for i, (input_jar, target_label) in enumerate(input_jars.items()):
        expect(input_jar.extension == ".jar", "Expected {} to have extension .jar!".format(input_jar))
        # Prefix with the index so same-named jars from different targets don't
        # collide in the symlinked input dir.
        jar_name = "{}_{}".format(i, input_jar.basename)
        input_srcs[jar_name] = input_jar
        output_jar = ctx.actions.declare_output(
            "preprocessed_java_classes/output_dir/{}".format(jar_name),
        )
        output_jars[output_jar] = target_label
        preprocess_cmd.hidden(output_jar.as_output())

    if not output_jars:
        return {}

    input_dir = ctx.actions.symlinked_dir("preprocessed_java_classes/input_dir", input_srcs)
    # All outputs were declared in the same directory, so the parent of any one
    # of them is the output dir handed to the script.
    output_dir = cmd_args(output_jars.keys()[0].as_output()).parent()
    env = {
        "ANDROID_BOOTCLASSPATH": cmd_args(
            ctx.attrs._android_toolchain[AndroidToolchainInfo].android_bootclasspath,
            delimiter = get_path_separator(),
        ),
        "IN_JARS_DIR": cmd_args(input_dir),
        "OUT_JARS_DIR": output_dir,
    }
    ctx.actions.run(preprocess_cmd, env = env, category = "preprocess_java_classes")
    return output_jars

View File

@ -0,0 +1,176 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//android:android_toolchain.bzl", "AndroidToolchainInfo")
load("@prelude//java:java_toolchain.bzl", "JavaToolchainInfo")
load("@prelude//java/utils:java_utils.bzl", "get_path_separator")
load("@prelude//utils:utils.bzl", "expect")
# Subdirectory where proguard's jar outputs land before being zip-scrubbed into
# their final locations.
_UNSCRUBBED_JARS_DIR = "unscrubbed"

ProguardOutput = record(
    # Final output jars mapped to their owning targets.
    jars_to_owners = {"artifact": "target_label"},
    # -printconfiguration output; None when proguard was skipped.
    proguard_configuration_output_file = ["artifact", None],
    # -printmapping output (an empty file when proguard was skipped).
    proguard_mapping_output_file = "artifact",
    # Artifacts consumers may want alongside the jars (argfile, mapping, etc.).
    proguard_artifacts = ["artifact"],
    # Artifacts referenced by path from the command line; must stay hidden deps.
    proguard_hidden_artifacts = ["artifact"],
)
def _get_proguard_command_line_args(
        ctx: "context",
        inputs_to_unscrubbed_outputs: {"artifact": "artifact"},
        proguard_configs: ["artifact"],
        mapping: "artifact",
        configuration: ["artifact", None],
        seeds: ["artifact", None],
        usage: ["artifact", None],
        android_toolchain: "AndroidToolchainInfo") -> ("cmd_args", ["artifact"]):
    # Builds the proguard argfile contents. Returns the command line plus the
    # artifacts it references only by path, which the caller must attach as
    # hidden inputs.
    cmd = cmd_args()
    hidden = []
    cmd.add("-basedirectory", "<user.dir>")

    android_sdk_proguard_config = ctx.attrs.android_sdk_proguard_config or "none"
    if android_sdk_proguard_config == "optimized":
        cmd.add("-include", android_toolchain.optimized_proguard_config)
        cmd.add("-optimizationpasses", str(ctx.attrs.optimization_passes))
        hidden.append(android_toolchain.optimized_proguard_config)
    elif android_sdk_proguard_config == "default":
        cmd.add("-include", android_toolchain.proguard_config)
        hidden.append(android_toolchain.proguard_config)
    else:
        expect(android_sdk_proguard_config == "none")

    for proguard_config in dedupe(proguard_configs):
        cmd.add("-include")
        # Wrap the config path in quotes for the argfile.
        cmd.add(cmd_args("\"", proguard_config, "\"", delimiter = ""))
        hidden.append(proguard_config)

    for jar_input, jar_output in inputs_to_unscrubbed_outputs.items():
        # When proguard is skipped, the "output" is the input jar itself and so
        # must not be marked as an output artifact.
        cmd.add("-injars", jar_input, "-outjars", jar_output if jar_output == jar_input else jar_output.as_output())

    cmd.add("-libraryjars")
    cmd.add(cmd_args(android_toolchain.android_bootclasspath, delimiter = get_path_separator()))
    hidden.extend(android_toolchain.android_bootclasspath)

    cmd.add("-printmapping", mapping.as_output())
    if configuration:
        cmd.add("-printconfiguration", configuration.as_output())
    if seeds:
        cmd.add("-printseeds", seeds.as_output())
    if usage:
        cmd.add("-printusage", usage.as_output())

    return cmd, hidden
def run_proguard(
        ctx: "context",
        android_toolchain: "AndroidToolchainInfo",
        java_toolchain: "JavaToolchainInfo",
        command_line_args_file: "artifact",
        command_line_args: "cmd_args",
        mapping_file: "artifact"):
    # Invokes proguard via `java -jar`, feeding it the pre-built argfile.
    run_proguard_cmd = cmd_args()
    run_proguard_cmd.add(
        java_toolchain.java[RunInfo],
        "-XX:-MaxFDLimit",
        ctx.attrs.proguard_jvm_args,
        "-Xmx{}".format(android_toolchain.proguard_max_heap_size),
        "-jar",
        android_toolchain.proguard_jar,
    )
    run_proguard_cmd.add(cmd_args(command_line_args_file, format = "@{}"))
    # The argfile references other artifacts by path only; attach the raw
    # command line as hidden inputs so they are materialized.
    run_proguard_cmd.hidden(command_line_args)

    # Some proguard configs can propagate the "-dontobfuscate" flag which disables
    # obfuscation and prevents the mapping.txt file from being generated.
    # Touch the mapping file first so the declared output always exists. Note the
    # joined proguard command is a single shell word ($2); unquoted expansion
    # word-splits it back into a command.
    sh_cmd = cmd_args([
        "sh",
        "-c",
        "touch $1 && $2",
        "--",
        mapping_file.as_output(),
        cmd_args(run_proguard_cmd, delimiter = " "),
    ])
    ctx.actions.run(sh_cmd, category = "run_proguard")
# Note that ctx.attrs.skip_proguard means that we should create the proguard command line (since
# e.g. Redex might want to consume it) but we don't actually run the proguard command.
def get_proguard_output(
        ctx: "context",
        input_jars: {"artifact": "target_label"},
        java_packaging_deps: ["JavaPackagingDep"],
        aapt_generated_proguard_config: ["artifact", None]) -> ProguardOutput.type:
    # Collect configs from packaging deps, the target's own attr, and (unless
    # ignored) the aapt-generated config.
    proguard_configs = [packaging_dep.proguard_config for packaging_dep in java_packaging_deps if packaging_dep.proguard_config]
    if ctx.attrs.proguard_config:
        proguard_configs.append(ctx.attrs.proguard_config)
    if not ctx.attrs.ignore_aapt_proguard_config and aapt_generated_proguard_config:
        proguard_configs.append(aapt_generated_proguard_config)

    if ctx.attrs.skip_proguard:
        # Outputs alias the inputs; write an empty mapping so consumers always
        # have a mapping file.
        inputs_to_unscrubbed_outputs = {input_jar: input_jar for input_jar in input_jars.keys()}
        mapping = ctx.actions.write("proguard/mapping.txt", [])
        configuration = None
        seeds = None
        usage = None
    else:
        inputs_to_unscrubbed_outputs = {input_jar: ctx.actions.declare_output(
            "proguard_output_jars/{}/{}_{}_obfuscated.jar".format(_UNSCRUBBED_JARS_DIR, input_jar.short_path, i),
        ) for i, input_jar in enumerate(input_jars.keys())}
        mapping = ctx.actions.declare_output("proguard/mapping.txt")
        configuration = ctx.actions.declare_output("proguard/configuration.txt")
        seeds = ctx.actions.declare_output("proguard/seeds.txt")
        usage = ctx.actions.declare_output("proguard/usage.txt")

    command_line_args, hidden_artifacts = _get_proguard_command_line_args(
        ctx,
        inputs_to_unscrubbed_outputs,
        proguard_configs,
        mapping,
        configuration,
        seeds,
        usage,
        ctx.attrs._android_toolchain[AndroidToolchainInfo],
    )
    # The argfile is always produced, even when proguard itself is skipped.
    command_line_args_file = ctx.actions.write("proguard/command-line.txt", command_line_args)

    if ctx.attrs.skip_proguard:
        return ProguardOutput(
            jars_to_owners = input_jars,
            proguard_configuration_output_file = None,
            proguard_mapping_output_file = mapping,
            proguard_artifacts = [command_line_args_file, mapping],
            proguard_hidden_artifacts = hidden_artifacts,
        )
    else:
        unscrubbed_output_jars = {unscrubbed_output: input_jars[input_jar] for input_jar, unscrubbed_output in inputs_to_unscrubbed_outputs.items()}
        run_proguard(
            ctx,
            ctx.attrs._android_toolchain[AndroidToolchainInfo],
            ctx.attrs._java_toolchain[JavaToolchainInfo],
            command_line_args_file,
            command_line_args,
            mapping,
        )
        # Run the toolchain's zip scrubber over each proguard-produced jar into
        # its final (non-"unscrubbed") location.
        output_jars = {}
        for i, (unscrubbed_jar, target_label) in enumerate(unscrubbed_output_jars.items()):
            output = ctx.actions.declare_output(unscrubbed_jar.short_path.replace("{}/".format(_UNSCRUBBED_JARS_DIR), ""))
            ctx.actions.run(
                cmd_args([ctx.attrs._java_toolchain[JavaToolchainInfo].zip_scrubber, unscrubbed_jar, output.as_output()]),
                category = "scrub_jar",
                identifier = str(i),
            )
            output_jars[output] = target_label
        return ProguardOutput(
            jars_to_owners = output_jars,
            proguard_configuration_output_file = configuration,
            proguard_mapping_output_file = mapping,
            proguard_artifacts = [command_line_args_file, mapping, configuration, seeds, usage],
            proguard_hidden_artifacts = hidden_artifacts,
        )

View File

@ -0,0 +1,198 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//java:java_library.bzl", "compile_to_jar")
load("@prelude//java:java_providers.bzl", "JavaClasspathEntry", "JavaLibraryInfo", "derive_compiling_deps")
# Generated R.java source code, as both a directory and a .src.zip. The
# strings/ids variants are only populated when strings and ids are generated
# separately.
RDotJavaSourceCode = record(
    r_dot_java_source_code_dir = "artifact",
    r_dot_java_source_code_zipped = "artifact",
    strings_source_code_dir = ["artifact", None],
    strings_source_code_zipped = ["artifact", None],
    ids_source_code_dir = ["artifact", None],
    ids_source_code_zipped = ["artifact", None],
)
def get_dummy_r_dot_java(
        ctx: "context",
        merge_android_resources_tool: RunInfo.type,
        android_resources: ["AndroidResourceInfo"],
        union_package: [str.type, None]) -> "JavaLibraryInfo":
    # Generates and compiles a "dummy" R.java (resource ids not forced final)
    # and wraps the result in a JavaLibraryInfo.
    source_code = _generate_r_dot_java_source_code(ctx, merge_android_resources_tool, android_resources, "dummy_r_dot_java", union_package = union_package)
    compiled_library = _generate_and_compile_r_dot_java(
        ctx,
        source_code.r_dot_java_source_code_zipped,
        "dummy_r_dot_java",
    )
    return JavaLibraryInfo(
        compiling_deps = derive_compiling_deps(ctx.actions, compiled_library, []),
        library_output = compiled_library,
        output_for_classpath_macro = compiled_library.full_library,
    )
def generate_r_dot_javas(
        ctx: "context",
        merge_android_resources_tool: RunInfo.type,
        android_resources: ["AndroidResourceInfo"],
        banned_duplicate_resource_types: [str.type],
        uber_r_dot_txt_files: ["artifact"],
        override_symbols_paths: ["artifact"],
        duplicate_resources_allowlist: ["artifact", None],
        union_package: [str.type, None],
        referenced_resources_lists: ["artifact"],
        generate_strings_and_ids_separately: [bool.type, None] = True) -> ["JavaLibraryInfo"]:
    # Generates the final R.java sources (resource ids forced final) and
    # compiles them. When generate_strings_and_ids_separately is set, the
    # strings and ids sources are additionally compiled into their own
    # libraries (with ".R$"-matching classes removed).
    r_dot_java_source_code = _generate_r_dot_java_source_code(
        ctx,
        merge_android_resources_tool,
        android_resources,
        "r_dot_java",
        generate_strings_and_ids_separately = generate_strings_and_ids_separately,
        force_final_resources_ids = True,
        banned_duplicate_resource_types = banned_duplicate_resource_types,
        uber_r_dot_txt_files = uber_r_dot_txt_files,
        override_symbols_paths = override_symbols_paths,
        duplicate_resources_allowlist = duplicate_resources_allowlist,
        union_package = union_package,
        referenced_resources_lists = referenced_resources_lists,
    )
    main_library_output = _generate_and_compile_r_dot_java(
        ctx,
        r_dot_java_source_code.r_dot_java_source_code_zipped,
        "main_r_dot_java",
    )
    if generate_strings_and_ids_separately:
        strings_library_output = _generate_and_compile_r_dot_java(
            ctx,
            r_dot_java_source_code.strings_source_code_zipped,
            "strings_r_dot_java",
            remove_classes = [".R$"],
        )
        ids_library_output = _generate_and_compile_r_dot_java(
            ctx,
            r_dot_java_source_code.ids_source_code_zipped,
            "ids_r_dot_java",
            remove_classes = [".R$"],
        )
    else:
        strings_library_output = None
        ids_library_output = None

    # One JavaLibraryInfo per compiled library; the strings/ids entries are
    # filtered out when not generated.
    return [JavaLibraryInfo(
        compiling_deps = derive_compiling_deps(ctx.actions, library_output, []),
        library_output = library_output,
        output_for_classpath_macro = library_output.full_library,
    ) for library_output in filter(None, [main_library_output, strings_library_output, ids_library_output])]
def _generate_r_dot_java_source_code(
        ctx: "context",
        merge_android_resources_tool: RunInfo.type,
        android_resources: ["AndroidResourceInfo"],
        identifier: str.type,
        force_final_resources_ids = False,
        generate_strings_and_ids_separately = False,
        banned_duplicate_resource_types: [str.type] = [],
        uber_r_dot_txt_files: ["artifact"] = [],
        override_symbols_paths: ["artifact"] = [],
        duplicate_resources_allowlist: ["artifact", None] = None,
        union_package: [str.type, None] = None,
        referenced_resources_lists: ["artifact"] = []) -> RDotJavaSourceCode.type:
    # Runs the merge-android-resources tool over every resource target's R.txt
    # symbols to produce merged R.java source code, as both a directory and a
    # .src.zip, optionally splitting string and id resources into their own
    # source trees.
    merge_resources_cmd = cmd_args(merge_android_resources_tool)

    # One "<R.txt path> <java package> _" line per resource target; "_" is a
    # placeholder in the target-name position.
    r_dot_txt_info = cmd_args()
    for android_resource in android_resources:
        r_dot_txt_info.add(cmd_args([android_resource.text_symbols, android_resource.r_dot_java_package, "_"], delimiter = " "))  # pass target name

    r_dot_txt_info_file = ctx.actions.write("r_dot_txt_info_file_for_{}.txt".format(identifier), r_dot_txt_info)
    merge_resources_cmd.add(["--symbol-file-info", r_dot_txt_info_file])
    # The info file only names these artifacts by path; hidden() ensures they
    # are materialized for the action.
    merge_resources_cmd.hidden([android_resource.r_dot_java_package for android_resource in android_resources])
    merge_resources_cmd.hidden([android_resource.text_symbols for android_resource in android_resources])

    output_dir = ctx.actions.declare_output("{}_source_code".format(identifier), dir = True)
    merge_resources_cmd.add(["--output-dir", output_dir.as_output()])
    output_dir_zipped = ctx.actions.declare_output("{}.src.zip".format(identifier))
    merge_resources_cmd.add(["--output-dir-zipped", output_dir_zipped.as_output()])

    # Optional separate outputs for string and id resources.
    if generate_strings_and_ids_separately:
        strings_output_dir = ctx.actions.declare_output("strings_source_code", dir = True)
        merge_resources_cmd.add(["--strings-output-dir", strings_output_dir.as_output()])
        strings_output_dir_zipped = ctx.actions.declare_output("strings.src.zip")
        merge_resources_cmd.add(["--strings-output-dir-zipped", strings_output_dir_zipped.as_output()])
        ids_output_dir = ctx.actions.declare_output("ids_source_code", dir = True)
        merge_resources_cmd.add(["--ids-output-dir", ids_output_dir.as_output()])
        ids_output_dir_zipped = ctx.actions.declare_output("ids.src.zip")
        merge_resources_cmd.add(["--ids-output-dir-zipped", ids_output_dir_zipped.as_output()])
    else:
        strings_output_dir = None
        strings_output_dir_zipped = None
        ids_output_dir = None
        ids_output_dir_zipped = None

    if force_final_resources_ids:
        merge_resources_cmd.add("--force-final-resource-ids")

    if len(banned_duplicate_resource_types) > 0:
        banned_duplicate_resource_types_file = ctx.actions.write("banned_duplicate_resource_types_file", banned_duplicate_resource_types)
        merge_resources_cmd.add(["--banned-duplicate-resource-types", banned_duplicate_resource_types_file])

    if len(uber_r_dot_txt_files) > 0:
        uber_r_dot_txt_files_list = ctx.actions.write("uber_r_dot_txt_files_list", uber_r_dot_txt_files)
        merge_resources_cmd.add(["--uber-r-dot-txt", uber_r_dot_txt_files_list])
        merge_resources_cmd.hidden(uber_r_dot_txt_files)

    if len(override_symbols_paths) > 0:
        override_symbols_paths_list = ctx.actions.write("override_symbols_paths_list", override_symbols_paths)
        merge_resources_cmd.add(["--override-symbols", override_symbols_paths_list])
        merge_resources_cmd.hidden(override_symbols_paths)

    if duplicate_resources_allowlist != None:
        merge_resources_cmd.add(["--duplicate-resource-allowlist-path", duplicate_resources_allowlist])

    if union_package != None:
        merge_resources_cmd.add(["--union-package", union_package])

    if referenced_resources_lists:
        referenced_resources_file = ctx.actions.write("referenced_resources_lists", referenced_resources_lists)
        merge_resources_cmd.add(["--referenced-resources-lists", referenced_resources_file])
        merge_resources_cmd.hidden(referenced_resources_lists)

    ctx.actions.run(merge_resources_cmd, category = "r_dot_java_merge_resources", identifier = identifier)

    # Non-split fields are None when generate_strings_and_ids_separately is
    # falsy.
    return RDotJavaSourceCode(
        r_dot_java_source_code_dir = output_dir,
        r_dot_java_source_code_zipped = output_dir_zipped,
        strings_source_code_dir = strings_output_dir,
        strings_source_code_zipped = strings_output_dir_zipped,
        ids_source_code_dir = ids_output_dir,
        ids_source_code_zipped = ids_output_dir_zipped,
    )
def _generate_and_compile_r_dot_java(
        ctx: "context",
        r_dot_java_source_code_zipped: "artifact",
        identifier: str.type,
        remove_classes: [str.type] = []) -> JavaClasspathEntry.type:
    # Compile a zipped R.java source tree into "<identifier>.jar", optionally
    # stripping classes matching `remove_classes`.
    out_jar = ctx.actions.declare_output("{}.jar".format(identifier))
    compile_to_jar(
        ctx,
        output = out_jar,
        actions_identifier = identifier,
        javac_tool = None,
        srcs = [r_dot_java_source_code_zipped],
        remove_classes = remove_classes,
    )

    # Extracting an abi is unnecessary as there's not really anything to
    # strip, so the full jar doubles as its own ABI.
    return JavaClasspathEntry(
        full_library = out_jar,
        abi = out_jar,
        abi_as_dir = None,
        required_for_source_only_abi = False,
    )

View File

@ -0,0 +1,96 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//android:android_binary_resources_rules.bzl", "get_android_binary_resources_info")
load("@prelude//android:android_library.bzl", "build_android_library")
load("@prelude//android:android_providers.bzl", "merge_android_packageable_info")
load("@prelude//android:android_toolchain.bzl", "AndroidToolchainInfo")
load("@prelude//java:java_test.bzl", "build_junit_test")
load("@prelude//java:java_toolchain.bzl", "JavaToolchainInfo")
load("@prelude//utils:utils.bzl", "expect")
load("@prelude//test/inject_test_run_info.bzl", "inject_test_run_info")
def robolectric_test_impl(ctx: "context") -> ["provider"]:
    # Builds a Robolectric (JVM Android) test: compiles the test library,
    # packages the merged resources APK + manifest locations into a
    # com/android/tools/test_config.properties jar, and wires everything into
    # a JUnit external test runner invocation.
    if ctx.attrs._build_only_native_code:
        return [DefaultInfo()]

    extra_cmds = []

    # Force robolectric to only use local dependency resolution.
    extra_cmds.append("-Drobolectric.offline=true")

    # Runtime dependencies may be supplied either as a single prebuilt
    # directory or as a list of artifacts that get symlinked into one.
    if ctx.attrs.robolectric_runtime_dependency:
        runtime_dependencies_dir = ctx.attrs.robolectric_runtime_dependency
    elif ctx.attrs.robolectric_runtime_dependencies:
        runtime_dependencies_dir = ctx.actions.symlinked_dir("runtime_dependencies", {
            runtime_dep.basename: runtime_dep
            for runtime_dep in ctx.attrs.robolectric_runtime_dependencies
        })
    else:
        runtime_dependencies_dir = None

    if runtime_dependencies_dir:
        extra_cmds.append(cmd_args(runtime_dependencies_dir, format = "-Drobolectric.dependency.dir={}"))

    all_packaging_deps = ctx.attrs.deps + (ctx.attrs.deps_query or []) + ctx.attrs.exported_deps + ctx.attrs.runtime_deps
    android_packageable_info = merge_android_packageable_info(ctx.label, ctx.actions, all_packaging_deps)
    resources_info = get_android_binary_resources_info(
        ctx,
        all_packaging_deps,
        android_packageable_info,
        java_packaging_deps = [],  # Only used for third-party jar resources, which we don't care about here.
        use_proto_format = False,
        referenced_resources_lists = [],
        generate_strings_and_ids_separately = False,
        aapt2_min_sdk = ctx.attrs.manifest_entries.get("min_sdk_version", None),
        aapt2_preferred_density = ctx.attrs.preferred_density_for_binary_resources,
    )

    # Robolectric reads the resource APK and merged-manifest paths from this
    # properties file at runtime.
    test_config_properties_file = ctx.actions.write(
        "test_config.properties",
        [
            cmd_args(["android_resource_apk", resources_info.primary_resources_apk], delimiter = "="),
            cmd_args(["android_merged_manifest", resources_info.manifest], delimiter = "="),
        ],
    )

    # Robolectric looks for a file named /com/android/tools/test_config.properties on the classpath
    test_config_symlinked_dir = ctx.actions.symlinked_dir("test_config_symlinked_dir", {"com/android/tools/test_config.properties": test_config_properties_file})
    test_config_properties_jar = ctx.actions.declare_output("test_config_properties.jar")
    jar_cmd = cmd_args([
        ctx.attrs._java_toolchain[JavaToolchainInfo].jar,
        "-cfM",  # -c: create new archive, -f: specify the file name, -M: do not create a manifest
        test_config_properties_jar.as_output(),
        "-C",
        test_config_symlinked_dir,
        ".",
    ])
    ctx.actions.run(jar_cmd, category = "test_config_properties_jar_cmd")

    # The properties file references these artifacts by path, so they must be
    # materialized when the test runs.
    extra_cmds.append(cmd_args().hidden(resources_info.primary_resources_apk, resources_info.manifest))

    r_dot_javas = [r_dot_java.library_output.full_library for r_dot_java in resources_info.r_dot_javas if r_dot_java.library_output]
    expect(len(r_dot_javas) <= 1, "android_library only works with single R.java")
    java_providers, _ = build_android_library(ctx, r_dot_java = r_dot_javas[0] if r_dot_javas else None)

    # The generated properties jar, the Android bootclasspath and the R.java
    # jar (if any) are all prepended to the test classpath.
    extra_classpath_entries = [test_config_properties_jar] + ctx.attrs._android_toolchain[AndroidToolchainInfo].android_bootclasspath
    extra_classpath_entries.extend(r_dot_javas)
    external_runner_test_info = build_junit_test(
        ctx,
        java_providers.java_library_info,
        java_providers.java_packaging_info,
        java_providers.class_to_src_map,
        extra_cmds = extra_cmds,
        extra_classpath_entries = extra_classpath_entries,
    )

    return inject_test_run_info(ctx, external_runner_test_info) + [
        java_providers.java_library_info,
        java_providers.java_library_intellij_info,
        java_providers.java_packaging_info,
        java_providers.template_placeholder_info,
        java_providers.default_info,
    ]

251
prelude/android/voltron.bzl Normal file
View File

@ -0,0 +1,251 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//android:android_providers.bzl", "AndroidPackageableInfo", "merge_android_packageable_info")
load("@prelude//android:android_toolchain.bzl", "AndroidToolchainInfo")
load("@prelude//java:java_providers.bzl", "get_all_java_packaging_deps")
load("@prelude//linking:shared_libraries.bzl", "SharedLibraryInfo", "merge_shared_libraries", "traverse_shared_library_info")
load("@prelude//utils:utils.bzl", "expect", "flatten")
# "Voltron" gives us the ability to split our Android APKs into different "modules". These
# modules can then be downloaded on demand rather than shipped with the "main" APK.
#
# The module corresponding to the "main" APK is called the "root" module.
#
# Voltron support comes in two main parts:
# (1) Constructing the Voltron module graph (assigning targets to each module). This is done
# by constructing a "target graph" and then delegating to buck1 to produce the module graph.
# (2) Using the Voltron module graph while building our APK.
#
# For (1), in order to calculate which targets belong to each module, we reconstruct a "target
# graph" from "deps" information that is propagated up through AndroidPackageableInfo.
# In buck1 we use the underlying "TargetGraph" object that is based on the raw target
# definitions. This results in some slightly different behavior for `provided_deps` - in
# buck2, we (correctly) ignore `provided_deps`, since they do not influence the packaging of
# the APK, whereas in `buck1`, we treat `provided_deps` the same as `deps`.
# In practice, this rarely affects the module assignments, but can mean that `buck2` will
# put a target inside a module whereas `buck1` will put it into the main APK (since `buck1`
# can find a path from an "always in main APK seed" to the target via some `provided_dep`,
# whereas `buck2` does not).
#
# For (2), we package up secondary DEX files and native libs into `assets/module_name` (see
# dex_rules.bzl and android_binary_native_rules.bzl for more information on how we do that).
# It is worth noting that we still put all of the non-root modules into the final APK. If
# the module should be downloaded on demand, then it is removed from the final APK in a
# subsequent post-processing step.
#
# There is also an `android_app_modularity` rule that just prints out details of the Voltron
# module graph and is used for any subsequent verification.
def android_app_modularity_impl(ctx: "context") -> ["provider"]:
    # Computes and writes out the Voltron module-graph metadata (which targets
    # belong to which module), optionally annotated with the dex jars and
    # native .so names each target contributes.
    if ctx.attrs._build_only_native_code:
        return [
            # Add an unused default output in case this target is used as an attr.source() anywhere.
            DefaultInfo(default_output = ctx.actions.write("{}/unused.txt".format(ctx.label.name), [])),
        ]

    all_deps = ctx.attrs.deps + flatten(ctx.attrs.application_module_configs.values())
    android_packageable_info = merge_android_packageable_info(ctx.label, ctx.actions, all_deps)
    shared_library_info = merge_shared_libraries(
        ctx.actions,
        deps = filter(None, [x.get(SharedLibraryInfo) for x in all_deps]),
    )
    traversed_shared_library_info = traverse_shared_library_info(shared_library_info)

    cmd, output = _get_base_cmd_and_output(
        ctx.actions,
        ctx.label,
        android_packageable_info,
        traversed_shared_library_info,
        ctx.attrs._android_toolchain[AndroidToolchainInfo],
        ctx.attrs.application_module_configs,
        ctx.attrs.application_module_dependencies,
        ctx.attrs.application_module_blacklist,
    )

    if ctx.attrs.should_include_classes:
        # Map each target to its dex'd jar, skipping anything listed in no_dx.
        no_dx_target_labels = [no_dx_target.label.raw_target() for no_dx_target in ctx.attrs.no_dx]
        java_packaging_deps = [packaging_dep for packaging_dep in get_all_java_packaging_deps(ctx, all_deps) if packaging_dep.dex and packaging_dep.dex.dex.owner.raw_target() not in no_dx_target_labels]
        targets_to_jars_args = [cmd_args([str(packaging_dep.label.raw_target()), packaging_dep.jar], delimiter = " ") for packaging_dep in java_packaging_deps]
        targets_to_jars = ctx.actions.write("targets_to_jars.txt", targets_to_jars_args)
        cmd.add([
            "--targets-to-jars",
            targets_to_jars,
        ]).hidden(targets_to_jars_args)

    if ctx.attrs.should_include_libraries:
        # Map each target to the .so names it provides, plus whether each lib
        # can be packaged as an asset.
        targets_to_so_names_args = [cmd_args([str(shared_lib.label.raw_target()), so_name, str(shared_lib.can_be_asset)], delimiter = " ") for so_name, shared_lib in traversed_shared_library_info.items()]
        targets_to_so_names = ctx.actions.write("targets_to_so_names.txt", targets_to_so_names_args)
        cmd.add([
            "--targets-to-so-names",
            targets_to_so_names,
        ]).hidden(targets_to_so_names_args)

    ctx.actions.run(cmd, category = "apk_module_graph")

    return [DefaultInfo(default_output = output)]
def get_target_to_module_mapping(ctx: "context", deps: ["dependency"]) -> ["artifact", None]:
    # Run the apk_module_graph tool in mapping-only mode and return the
    # resulting artifact, or None when no module configs are defined (i.e.
    # Voltron is not in use for this target).
    module_configs = ctx.attrs.application_module_configs
    if not module_configs:
        return None

    combined_deps = deps + flatten(module_configs.values())
    packageable_info = merge_android_packageable_info(ctx.label, ctx.actions, combined_deps)
    shared_libs = traverse_shared_library_info(merge_shared_libraries(
        ctx.actions,
        deps = filter(None, [d.get(SharedLibraryInfo) for d in combined_deps]),
    ))

    cmd, output = _get_base_cmd_and_output(
        ctx.actions,
        ctx.label,
        packageable_info,
        shared_libs,
        ctx.attrs._android_toolchain[AndroidToolchainInfo],
        module_configs,
        ctx.attrs.application_module_dependencies,
        ctx.attrs.application_module_blacklist,
    )
    cmd.add("--output-module-info-and-target-to-module-only")
    ctx.actions.run(cmd, category = "apk_module_graph")

    return output
def _get_base_cmd_and_output(
        actions: "actions",
        label: "label",
        android_packageable_info: "AndroidPackageableInfo",
        traversed_shared_library_info: {str.type: "SharedLibrary"},
        android_toolchain: "AndroidToolchainInfo",
        application_module_configs: {str.type: ["dependency"]},
        application_module_dependencies: [{str.type: [str.type]}, None],
        application_module_blocklist: [[["dependency"]], None]) -> ("cmd_args", "artifact"):
    # Builds the shared portion of the apk_module_graph tool invocation:
    # serializes the target graph, seed-config map and module dependencies to
    # JSON files, and declares the metadata output. Returns (cmd, output).
    deps_infos = list(android_packageable_info.deps.traverse()) if android_packageable_info.deps else []
    deps_map = {deps_info.name: deps_info.deps for deps_info in deps_infos}
    target_graph_file = actions.write_json("target_graph.json", deps_map)
    # Only seeds that carry AndroidPackageableInfo are kept in the seed map.
    application_module_configs_map = {
        module_name: [seed.label.raw_target() for seed in seeds if seed.get(AndroidPackageableInfo)]
        for module_name, seeds in application_module_configs.items()
    }
    application_module_configs_file = actions.write_json("application_module_configs.json", application_module_configs_map)
    application_module_dependencies_file = actions.write_json("application_module_dependencies.json", application_module_dependencies or {})
    output = actions.declare_output("apk_module_metadata.txt")
    cmd = cmd_args([
        android_toolchain.apk_module_graph[RunInfo],
        "--root-target",
        str(label.raw_target()),
        "--target-graph",
        target_graph_file,
        "--seed-config-map",
        application_module_configs_file,
        "--app-module-dependencies-map",
        application_module_dependencies_file,
        "--output",
        output.as_output(),
    ])

    # Anything that is used by a wrap script needs to go into the primary APK, as do all
    # of their deps.
    used_by_wrap_script_libs = [str(shared_lib.label.raw_target()) for shared_lib in traversed_shared_library_info.values() if shared_lib.for_primary_apk]
    prebuilt_native_library_dirs = list(android_packageable_info.prebuilt_native_library_dirs.traverse()) if android_packageable_info.prebuilt_native_library_dirs else []
    prebuilt_native_library_targets_for_primary_apk = [str(native_lib_dir.raw_target) for native_lib_dir in prebuilt_native_library_dirs if native_lib_dir.for_primary_apk]
    if application_module_blocklist or used_by_wrap_script_libs or prebuilt_native_library_targets_for_primary_apk:
        # Blocklisted targets and primary-APK-only libs are passed to the tool
        # as extra "always in main APK" seeds.
        all_blocklisted_deps = used_by_wrap_script_libs + prebuilt_native_library_targets_for_primary_apk
        if application_module_blocklist:
            all_blocklisted_deps.extend([str(blocklisted_dep.label.raw_target()) for blocklisted_dep in flatten(application_module_blocklist)])

        application_module_blocklist_file = actions.write(
            "application_module_blocklist.txt",
            all_blocklisted_deps,
        )
        cmd.add([
            "--always-in-main-apk-seeds",
            application_module_blocklist_file,
        ])

    return cmd, output
# Name of the module corresponding to the "main" APK (the "root" module).
ROOT_MODULE = "dex"
def is_root_module(module: str.type) -> bool.type:
    # True iff `module` is the root module (the one packaged in the main APK).
    return module == ROOT_MODULE
def all_targets_in_root_module(_module: str.type) -> str.type:
    # Mapping function used by get_root_module_only_apk_module_graph_info:
    # every target maps to the root module regardless of input.
    return ROOT_MODULE
# Parsed view of the Voltron module graph. The three "function" fields close
# over the parsed data (see get_apk_module_graph_info).
APKModuleGraphInfo = record(
    # Names of all modules, including the root module.
    module_list = [str.type],
    # raw target string -> module name
    target_to_module_mapping_function = "function",
    # module name -> canary class name
    module_to_canary_class_name_function = "function",
    # module name -> list of module names it depends on
    module_to_module_deps_function = "function",
)
def get_root_module_only_apk_module_graph_info() -> APKModuleGraphInfo.type:
    # Trivial module graph used when Voltron modules are not configured:
    # there is exactly one module (the root) with no deps.
    def canary_class_name_for_root(module: str.type):
        expect(is_root_module(module))
        return "secondary"

    def deps_for_root(module: str.type):
        expect(is_root_module(module))
        return []

    return APKModuleGraphInfo(
        module_list = [ROOT_MODULE],
        target_to_module_mapping_function = all_targets_in_root_module,
        module_to_canary_class_name_function = canary_class_name_for_root,
        module_to_module_deps_function = deps_for_root,
    )
def get_apk_module_graph_info(
        ctx: "context",
        apk_module_graph_file: "artifact",
        artifacts) -> APKModuleGraphInfo.type:
    # Parses the apk_module_metadata file written by the apk_module_graph
    # tool. Layout (as consumed below):
    #   line 0:          number of modules, N
    #   lines 1..N:      "<module> <canary class> <dep> <dep> ..."
    #   remaining lines: "<target> <module>" mappings
    #   final line:      empty
    apk_module_graph_lines = artifacts[apk_module_graph_file].read_string().split("\n")
    module_count = int(apk_module_graph_lines[0])
    module_infos = apk_module_graph_lines[1:module_count + 1]
    target_to_module_lines = apk_module_graph_lines[module_count + 1:-1]
    expect(apk_module_graph_lines[-1] == "", "Expect last line to be an empty string!")

    module_to_canary_class_name_map = {}
    module_to_module_deps_map = {}
    for line in module_infos:
        line_data = line.split(" ")
        module_name = line_data[0]
        canary_class_name = line_data[1]
        # Remaining fields are the module's deps; empty fields are dropped.
        module_deps = [module_dep for module_dep in line_data[2:] if module_dep]
        module_to_canary_class_name_map[module_name] = canary_class_name
        module_to_module_deps_map[module_name] = module_deps

    # The root target itself is always mapped to the root module.
    target_to_module_mapping = {str(ctx.label.raw_target()): ROOT_MODULE}
    for line in target_to_module_lines:
        target, module = line.split(" ")
        target_to_module_mapping[target] = module

    def target_to_module_mapping_function(raw_target: str.type) -> str.type:
        mapped_module = target_to_module_mapping.get(raw_target)
        expect(mapped_module != None, "No module found for target {}!".format(raw_target))
        return mapped_module

    def module_to_canary_class_name_function(voltron_module: str.type) -> str.type:
        return module_to_canary_class_name_map.get(voltron_module)

    def module_to_module_deps_function(voltron_module: str.type) -> list.type:
        return module_to_module_deps_map.get(voltron_module)

    return APKModuleGraphInfo(
        module_list = module_to_canary_class_name_map.keys(),
        target_to_module_mapping_function = target_to_module_mapping_function,
        module_to_canary_class_name_function = module_to_canary_class_name_function,
        module_to_module_deps_function = module_to_module_deps_function,
    )

View File

@ -0,0 +1,108 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo")
load("@prelude//utils:utils.bzl", "flatten")
load(":apple_asset_catalog_compilation_options.bzl", "AppleAssetCatalogsCompilationOptions", "get_apple_asset_catalogs_compilation_options") # @unused Used as a type
load(":apple_asset_catalog_types.bzl", "AppleAssetCatalogResult", "AppleAssetCatalogSpec", "StringWithSourceTarget")
load(":apple_bundle_utility.bzl", "get_bundle_min_target_version", "get_bundle_resource_processing_options")
load(":apple_sdk.bzl", "get_apple_sdk_name")
load(":apple_sdk_metadata.bzl", "get_apple_sdk_metadata_for_sdk_name")
load(":resource_groups.bzl", "create_resource_graph")
def apple_asset_catalog_impl(ctx: "context") -> ["provider"]:
    # Wraps this rule's asset catalog dirs (plus optional app icon / launch
    # image names) in an AppleAssetCatalogSpec and hangs it on the resource
    # graph; no outputs are produced here.
    app_icon = None
    if ctx.attrs.app_icon != None:
        app_icon = StringWithSourceTarget(source = ctx.label, value = ctx.attrs.app_icon)

    launch_image = None
    if ctx.attrs.launch_image != None:
        launch_image = StringWithSourceTarget(source = ctx.label, value = ctx.attrs.launch_image)

    catalog_spec = AppleAssetCatalogSpec(
        app_icon = app_icon,
        dirs = ctx.attrs.dirs,
        launch_image = launch_image,
    )
    resource_graph = create_resource_graph(
        ctx = ctx,
        labels = ctx.attrs.labels,
        deps = [],
        exported_deps = [],
        asset_catalog_spec = catalog_spec,
    )
    return [DefaultInfo(default_output = None), resource_graph]
def compile_apple_asset_catalog(ctx: "context", specs: [AppleAssetCatalogSpec.type]) -> [AppleAssetCatalogResult.type, None]:
    # Merge all specs into one and run `actool` over the combined catalog
    # dirs. Returns None when there is nothing to compile.
    merged_spec = _merge_asset_catalog_specs(ctx, specs)
    if not merged_spec.dirs:
        return None

    plist_output = ctx.actions.declare_output("AssetCatalog.plist")
    catalog_output = ctx.actions.declare_output("AssetCatalogCompiled", dir = True)
    actool_cmd = _get_actool_command(
        ctx,
        merged_spec,
        catalog_output.as_output(),
        plist_output.as_output(),
        get_apple_asset_catalogs_compilation_options(ctx),
    )
    processing_options = get_bundle_resource_processing_options(ctx)
    ctx.actions.run(
        actool_cmd,
        prefer_local = processing_options.prefer_local,
        allow_cache_upload = processing_options.allow_cache_upload,
        category = "apple_asset_catalog",
    )
    return AppleAssetCatalogResult(compiled_catalog = catalog_output, catalog_plist = plist_output)
def _merge_asset_catalog_specs(ctx: "context", xs: [AppleAssetCatalogSpec.type]) -> AppleAssetCatalogSpec.type:
    # Collapse many specs into one: deduped union of dirs, and at most one
    # app icon / launch image across all of them (fails otherwise).
    return AppleAssetCatalogSpec(
        app_icon = _get_at_most_one_attribute(ctx, xs, "app_icon"),
        dirs = dedupe(flatten([spec.dirs for spec in xs])),
        launch_image = _get_at_most_one_attribute(ctx, xs, "launch_image"),
    )
def _get_at_most_one_attribute(ctx: "context", xs: ["_record"], attr_name: str.type) -> ["StringWithSourceTarget", None]:
    # Collect the non-None values of `attr_name` across all specs; returns the
    # single value, None when absent, and fails when more than one is set.
    values = dedupe(filter(None, [getattr(x, attr_name) for x in xs]))
    if len(values) == 0:
        return None
    if len(values) == 1:
        return values[0]
    fail("At most one asset catalog in the dependencies of `{}` can have an `{}` attribute. At least 2 catalogs are providing it: `{}` and `{}`.".format(_get_target(ctx), attr_name, values[0].source, values[1].source))
def _get_target(ctx: "context") -> str.type:
    # "<package>:<name>" label string, used in error messages.
    return "{}:{}".format(ctx.label.package, ctx.label.name)
def _get_actool_command(ctx: "context", info: AppleAssetCatalogSpec.type, catalog_output: "output_artifact", plist_output: "output_artifact", compilation_options: AppleAssetCatalogsCompilationOptions.type) -> "cmd_args":
    # Assembles the full `actool` invocation for the given merged spec,
    # wrapped in a shell script that first creates the output directory.
    external_name = get_apple_sdk_name(ctx)
    target_device = get_apple_sdk_metadata_for_sdk_name(external_name).target_device_flags

    actool = ctx.attrs._apple_toolchain[AppleToolchainInfo].actool
    actool_command = cmd_args([
                         actool,
                         "--platform",
                         external_name,
                         "--minimum-deployment-target",
                         get_bundle_min_target_version(ctx),
                         "--compile",
                         catalog_output,
                         "--output-partial-info-plist",
                         plist_output,
                     ] +
                     target_device +
                     # Optional flags driven by the spec and compilation
                     # options; each contributes nothing when unset/disabled.
                     (
                         ["--app-icon", info.app_icon.value] if info.app_icon else []
                     ) + (
                         ["--launch-image", info.launch_image.value] if info.launch_image else []
                     ) + (
                         ["--notices"] if compilation_options.enable_notices else []
                     ) + (
                         ["--warnings"] if compilation_options.enable_warnings else []
                     ) + (
                         ["--errors"] if compilation_options.enable_errors else []
                     ) + (
                         ["--compress-pngs"] if compilation_options.compress_pngs else []
                     ) +
                     ["--optimization", compilation_options.optimization] +
                     ["--output-format", compilation_options.output_format] +
                     compilation_options.extra_flags +
                     # The catalog dirs themselves come last, as positional args.
                     info.dirs)

    # `actool` expects the output directory to be present.
    # Use the wrapper script to create the directory first and then actually call `actool`.
    wrapper_script, _ = ctx.actions.write(
        "actool_wrapper.sh",
        [
            cmd_args(catalog_output, format = "mkdir -p {}"),
            cmd_args(actool_command, delimiter = " "),
        ],
        allow_args = True,
    )
    # hidden() keeps the real command's inputs/outputs attached to the action
    # even though only the wrapper script appears on the command line.
    command = cmd_args(["/bin/sh", wrapper_script]).hidden([actool_command])
    return command

View File

@ -0,0 +1,29 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
# Knobs controlling how `actool` is invoked (see _get_actool_command for the
# flag each field maps to).
AppleAssetCatalogsCompilationOptions = record(
    # --notices
    enable_notices = field(bool.type),
    # --warnings
    enable_warnings = field(bool.type),
    # --errors
    enable_errors = field(bool.type),
    # --compress-pngs
    compress_pngs = field(bool.type),
    # --optimization <value>
    optimization = field(str.type),
    # --output-format <value>
    output_format = field(str.type),
    # Extra flags appended verbatim to the actool command line.
    extra_flags = field([str.type]),
)
def get_apple_asset_catalogs_compilation_options(ctx: "context") -> AppleAssetCatalogsCompilationOptions.type:
    # Reads the `asset_catalogs_compilation_options` attr, falling back to a
    # default for each missing key.
    attr_options = ctx.attrs.asset_catalogs_compilation_options

    def lookup(key, default):
        return attr_options.get(key, default)

    return AppleAssetCatalogsCompilationOptions(
        enable_notices = lookup("notices", True),
        enable_warnings = lookup("warnings", True),
        enable_errors = lookup("errors", True),
        compress_pngs = lookup("compress_pngs", True),
        optimization = lookup("optimization", "space"),
        output_format = lookup("output_format", "human-readable-text"),
        extra_flags = lookup("extra_flags", []),
    )

View File

@ -0,0 +1,30 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
# A string value paired with the target that provided it, so error messages
# can point back at the offending target.
StringWithSourceTarget = record(
    # Target providing the string value
    source = field("label"),
    value = field("string"),
)
# Inputs to asset catalog compilation, gathered from one or more
# apple_asset_catalog targets (see _merge_asset_catalog_specs).
AppleAssetCatalogSpec = record(
    # At most one per given `apple_bundle` (including all transitive catalog dependencies),
    # optional reference in a form of a name (extension omitted) of an .appiconset which
    # contains an image set representing an application icon.
    # This set should be contained in one of catalogs referenced by `dirs` attribute.
    app_icon = field([StringWithSourceTarget.type, None]),
    # Asset catalog directories to compile.
    dirs = field(["artifact"]),
    # Same as `app_icon` but with an application launch image semantics.
    launch_image = field([StringWithSourceTarget.type, None]),
)
# Outputs of a successful `actool` run (see compile_apple_asset_catalog).
AppleAssetCatalogResult = record(
    # Directory which contains compiled assets ready to be copied into application bundle
    compiled_catalog = field("artifact"),
    # .plist file to be merged into main application Info.plist file, containing information about compiled assets
    catalog_plist = field("artifact"),
)

View File

@ -0,0 +1,99 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//apple:apple_stripping.bzl", "apple_strip_args")
load("@prelude//cxx:cxx_executable.bzl", "cxx_executable")
load("@prelude//cxx:cxx_library_utility.bzl", "cxx_attr_deps", "cxx_attr_exported_deps")
load("@prelude//cxx:cxx_sources.bzl", "get_srcs_with_flags")
load("@prelude//cxx:cxx_types.bzl", "CxxRuleConstructorParams")
load("@prelude//cxx:debug.bzl", "project_external_debug_info")
load(
"@prelude//cxx:link_groups.bzl",
"get_link_group_info",
)
load(
"@prelude//cxx:preprocessor.bzl",
"CPreprocessor",
)
load(":apple_bundle_types.bzl", "AppleBundleLinkerMapInfo", "AppleMinDeploymentVersionInfo")
load(":apple_bundle_utility.bzl", "get_bundle_infos_from_graph", "merge_bundle_linker_maps_info")
load(":apple_code_signing_types.bzl", "AppleEntitlementsInfo")
load(":apple_dsym.bzl", "AppleDebuggableInfo", "DEBUGINFO_SUBTARGET", "DSYM_SUBTARGET", "get_apple_dsym")
load(":apple_frameworks.bzl", "get_framework_search_path_flags")
load(":apple_target_sdk_version.bzl", "get_min_deployment_version_for_node", "get_min_deployment_version_target_linker_flags", "get_min_deployment_version_target_preprocessor_flags")
load(":apple_utility.bzl", "get_apple_cxx_headers_layout")
load(":resource_groups.bzl", "create_resource_graph")
load(":xcode.bzl", "apple_populate_xcode_attributes")
def apple_binary_impl(ctx: "context") -> ["provider"]:
    # Builds an Apple executable on top of cxx_executable, layering on
    # entitlements embedding, min-deployment-version flags, dSYM generation,
    # and the Apple resource graph.
    extra_link_flags = get_min_deployment_version_target_linker_flags(ctx) + _entitlements_link_flags(ctx)
    framework_search_path_pre = CPreprocessor(
        args = [get_framework_search_path_flags(ctx)],
    )
    constructor_params = CxxRuleConstructorParams(
        rule_type = "apple_binary",
        headers_layout = get_apple_cxx_headers_layout(ctx),
        extra_link_flags = extra_link_flags,
        srcs = get_srcs_with_flags(ctx),
        extra_preprocessors = get_min_deployment_version_target_preprocessor_flags(ctx) + [framework_search_path_pre],
        strip_executable = ctx.attrs.stripped,
        strip_args_factory = apple_strip_args,
        cxx_populate_xcode_attributes_func = apple_populate_xcode_attributes,
        link_group_info = get_link_group_info(ctx),
        prefer_stripped_objects = ctx.attrs.prefer_stripped_objects,
        # Some apple rules rely on `static` libs *not* following dependents.
        link_groups_force_static_follows_dependents = False,
    )
    cxx_output = cxx_executable(ctx, constructor_params)

    # Project the debug info and produce a dSYM for the linked binary,
    # exposed as the "dsym" and debug-info sub-targets.
    external_debug_info = project_external_debug_info(
        actions = ctx.actions,
        label = ctx.label,
        infos = [cxx_output.external_debug_info],
    )
    dsym_artifact = get_apple_dsym(
        ctx = ctx,
        executable = cxx_output.binary,
        external_debug_info = external_debug_info,
        action_identifier = cxx_output.binary.short_path,
    )
    cxx_output.sub_targets[DSYM_SUBTARGET] = [DefaultInfo(default_output = dsym_artifact)]
    cxx_output.sub_targets[DEBUGINFO_SUBTARGET] = [DefaultInfo(other_outputs = external_debug_info)]

    min_version = get_min_deployment_version_for_node(ctx)
    min_version_providers = [AppleMinDeploymentVersionInfo(version = min_version)] if min_version != None else []

    resource_graph = create_resource_graph(
        ctx = ctx,
        labels = ctx.attrs.labels,
        deps = cxx_attr_deps(ctx),
        exported_deps = cxx_attr_exported_deps(ctx),
    )
    bundle_infos = get_bundle_infos_from_graph(resource_graph)
    if cxx_output.linker_map_data:
        bundle_infos.append(AppleBundleLinkerMapInfo(linker_maps = [cxx_output.linker_map_data.map]))

    return [
        DefaultInfo(default_output = cxx_output.binary, sub_targets = cxx_output.sub_targets),
        RunInfo(args = cmd_args(cxx_output.binary).hidden(cxx_output.runtime_files)),
        AppleEntitlementsInfo(entitlements_file = ctx.attrs.entitlements_file),
        AppleDebuggableInfo(dsyms = [dsym_artifact], external_debug_info = cxx_output.external_debug_info),
        cxx_output.xcode_data,
        merge_bundle_linker_maps_info(bundle_infos),
    ] + [resource_graph] + min_version_providers
def _entitlements_link_flags(ctx: "context") -> [""]:
    # Linker flags that embed the entitlements file into a dedicated
    # __TEXT,__entitlements section; empty when no entitlements file is set.
    if not ctx.attrs.entitlements_file:
        return []
    return [
        "-Xlinker",
        "-sectcreate",
        "-Xlinker",
        "__TEXT",
        "-Xlinker",
        "__entitlements",
        "-Xlinker",
        ctx.attrs.entitlements_file,
    ]

View File

@ -0,0 +1,290 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo")
load("@prelude//apple/user:apple_selective_debugging.bzl", "AppleSelectiveDebuggingInfo", "filter_debug_info")
load("@prelude//cxx:debug.bzl", "maybe_external_debug_info", "project_external_debug_info")
load(
"@prelude//ide_integrations:xcode.bzl",
"XCODE_DATA_SUB_TARGET",
"generate_xcode_data",
)
load("@prelude//utils:utils.bzl", "expect", "flatten", "is_any")
load(":apple_bundle_destination.bzl", "AppleBundleDestination")
load(":apple_bundle_part.bzl", "AppleBundlePart", "assemble_bundle", "bundle_output", "get_apple_bundle_part_relative_destination_path")
load(":apple_bundle_resources.bzl", "get_apple_bundle_resource_part_list", "get_is_watch_bundle")
load(":apple_bundle_types.bzl", "AppleBundleInfo", "AppleBundleLinkerMapInfo", "AppleBundleResourceInfo")
load(":apple_bundle_utility.bzl", "get_bundle_min_target_version", "get_product_name")
load(":apple_dsym.bzl", "AppleBundleDebuggableInfo", "AppleDebuggableInfo", "DEBUGINFO_SUBTARGET", "DSYM_INFO_SUBTARGET", "DSYM_SUBTARGET", "get_apple_dsym", "get_apple_dsym_info")
load(":apple_sdk.bzl", "get_apple_sdk_name")
load(":xcode.bzl", "apple_xcode_data_add_xctoolchain")
INSTALL_DATA_SUB_TARGET = "install-data"
_INSTALL_DATA_FILE_NAME = "install_apple_data.json"
_PLIST = "plist"
_XCTOOLCHAIN_SUB_TARGET = "xctoolchain"
# Parameters needed to compute the full list of parts that make up a bundle.
AppleBundlePartListConstructorParams = record(
    # The binaries/executables, required to create a bundle
    binaries = field([AppleBundlePart.type]),
)
# Result of computing all parts which make up a bundle.
AppleBundlePartListOutput = record(
    # The parts to be copied into an Apple bundle, *including* binaries
    parts = field([AppleBundlePart.type]),
    # Part that holds the info.plist
    info_plist_part = field(AppleBundlePart.type),
)
# The binary chosen for a bundle (real binary or watchOS stub), plus its debug info.
AppleBundleBinaryOutput = record(
    # The executable artifact to be placed into the bundle.
    binary = field("artifact"),
    # Debug info for `binary`; `None` when none is available (e.g. for the stub binary).
    debuggable_info = field([AppleDebuggableInfo.type, None], None),
    # In the case of watchkit, the `ctx.attrs.binary`'s not set, and we need to create a stub binary.
    is_watchkit_stub_binary = field(bool.type, False),
)
def _get_binary(ctx: "context") -> AppleBundleBinaryOutput.type:
    """Selects the bundle binary: the single output of `binary`, or a WatchKit stub.

    No binary means we are building a watchOS bundle (in v1 the bundle binary is
    present, but its sources are empty).
    """
    binary_dep = ctx.attrs.binary
    if binary_dep == None:
        return AppleBundleBinaryOutput(
            binary = _get_watch_kit_stub_artifact(ctx),
            is_watchkit_stub_binary = True,
        )

    if len(binary_dep[DefaultInfo].default_outputs) != 1:
        fail("Expected single output artifact. Make sure the implementation of rule from `binary` attribute is correct.")

    return _maybe_scrub_binary(ctx, binary_dep)
def _maybe_scrub_binary(ctx, binary_dep: "dependency") -> AppleBundleBinaryOutput.type:
    """Applies selective-debugging scrubbing to the bundle binary when configured.

    When `selective_debugging` is unset, the binary and its debug info pass
    through unchanged. Otherwise the binary is scrubbed, a new dSYM is generated
    from the scrubbed binary, and the external debug info is filtered.
    """
    binary = binary_dep[DefaultInfo].default_outputs[0]
    debuggable_info = binary_dep.get(AppleDebuggableInfo)
    if ctx.attrs.selective_debugging == None:
        return AppleBundleBinaryOutput(binary = binary, debuggable_info = debuggable_info)

    selective_debugging_info = ctx.attrs.selective_debugging[AppleSelectiveDebuggingInfo]
    binary = selective_debugging_info.scrub_binary(ctx, binary)

    # A binary without debug info has nothing further to scrub or filter.
    if not debuggable_info:
        return AppleBundleBinaryOutput(binary = binary)

    # If we have debuggable info for this binary, create the scrubbed dsym for the binary
    # and filter debug info.
    external_debug_info = debuggable_info.external_debug_info
    dsym_artifact = _get_scrubbed_binary_dsym(ctx, binary, external_debug_info)

    all_debug_info = external_debug_info.traverse()
    filtered_debug_info = filter_debug_info(all_debug_info, selective_debugging_info)
    filtered_external_debug_info = maybe_external_debug_info(
        actions = ctx.actions,
        label = ctx.label,
        artifacts = filtered_debug_info,
    )
    debuggable_info = AppleDebuggableInfo(dsyms = [dsym_artifact], external_debug_info = filtered_external_debug_info)
    return AppleBundleBinaryOutput(binary = binary, debuggable_info = debuggable_info)
def _get_scrubbed_binary_dsym(ctx, binary: "artifact", external_debug_info: "transitive_set") -> "artifact":
    """Generates a dSYM for the scrubbed binary from its projected external debug info."""
    debug_info_args = project_external_debug_info(
        actions = ctx.actions,
        label = ctx.label,
        infos = [external_debug_info],
    )
    return get_apple_dsym(
        ctx = ctx,
        executable = binary,
        external_debug_info = debug_info_args,
        action_identifier = binary.short_path,
    )
def _get_binary_bundle_parts(ctx: "context", binary_output: AppleBundleBinaryOutput.type) -> ([AppleBundlePart.type], AppleBundlePart.type):
    """Returns a tuple of all binary bundle parts and the primary bundle binary."""
    primary_binary_part = AppleBundlePart(
        source = binary_output.binary,
        destination = AppleBundleDestination("executables"),
        new_name = get_product_name(ctx),
    )

    parts = []
    if binary_output.is_watchkit_stub_binary:
        # If we're using a stub binary from watchkit, we also need to add extra part for stub.
        parts.append(AppleBundlePart(source = binary_output.binary, destination = AppleBundleDestination("watchkitstub"), new_name = "WK"))
    parts.append(primary_binary_part)

    return parts, primary_binary_part
def _get_watch_kit_stub_artifact(ctx: "context") -> "artifact":
    """Returns the toolchain's WatchKit stub binary; only valid when `binary` is unset."""
    expect(ctx.attrs.binary == None, "Stub is useful only when binary is not set which means watchOS bundle is built.")
    stub = ctx.attrs._apple_toolchain[AppleToolchainInfo].watch_kit_stub_binary
    if stub == None:
        fail("Expected Watch Kit stub binary to be provided when bundle binary is not set.")
    return stub
def _apple_bundle_run_validity_checks(ctx: "context"):
    """Fails fast on invalid attributes before any actions are registered."""
    if ctx.attrs.extension == None:
        fail("`extension` attribute is required")
def _get_debuggable_deps(ctx: "context", binary_output: AppleBundleBinaryOutput.type) -> AppleBundleDebuggableInfo.type:
    """Gathers `AppleDebuggableInfo` from deps and the bundle binary, deduplicated."""

    # `label` captures configuration as well, so it's safe to use for comparison purposes
    binary_label = getattr(ctx.attrs.binary, "label", None)

    deps_debuggable_infos = []
    for dep in ctx.attrs.deps:
        # It's allowed for `ctx.attrs.binary` to appear in `ctx.attrs.deps` as well,
        # in this case, do not duplicate the debugging info for the binary coming from two paths.
        if dep.label == binary_label:
            continue
        dep_info = dep.get(AppleDebuggableInfo)
        if dep_info:
            deps_debuggable_infos.append(dep_info)

    # We don't care to process the watchkit stub binary.
    binary_debuggable_info = None
    if not binary_output.is_watchkit_stub_binary:
        binary_debuggable_info = binary_output.debuggable_info

    all_infos = list(deps_debuggable_infos)
    if binary_debuggable_info:
        all_infos.append(binary_debuggable_info)

    return AppleBundleDebuggableInfo(
        binary_info = binary_debuggable_info,
        dep_infos = deps_debuggable_infos,
        all_infos = all_infos,
    )
def _precomputed_resource_part_list(ctx: "context"):
    # Resources may already have been computed by the private `_resource_bundle`
    # sibling target; returns `None` when they were not.
    if not hasattr(ctx.attrs, "_resource_bundle") or ctx.attrs._resource_bundle == None:
        return None
    resource_info = ctx.attrs._resource_bundle[AppleBundleResourceInfo]
    if resource_info == None:
        return None
    return resource_info.resource_output

def get_apple_bundle_part_list(ctx: "context", params: AppleBundlePartListConstructorParams.type) -> AppleBundlePartListOutput.type:
    """Returns the complete bundle part list: resource parts plus the given binaries."""
    resource_part_list = _precomputed_resource_part_list(ctx)
    if resource_part_list == None:
        resource_part_list = get_apple_bundle_resource_part_list(ctx)

    return AppleBundlePartListOutput(
        parts = resource_part_list.resource_parts + params.binaries,
        info_plist_part = resource_part_list.info_plist_part,
    )
def apple_bundle_impl(ctx: "context") -> ["provider"]:
    """Rule implementation for `apple_bundle()`.

    Assembles the bundle directory from the binary and resource parts and
    returns providers for running, installing, debugging, linker maps and
    Xcode integration, plus several sub-targets (dSYMs, debug info, plist,
    xctoolchain, linker maps, xcode data).
    """
    _apple_bundle_run_validity_checks(ctx)

    binary_outputs = _get_binary(ctx)

    all_binary_parts, primary_binary_part = _get_binary_bundle_parts(ctx, binary_outputs)
    apple_bundle_part_list_output = get_apple_bundle_part_list(ctx, AppleBundlePartListConstructorParams(binaries = all_binary_parts))

    sub_targets = {}

    linker_maps_directory, linker_map_info = _linker_maps_data(ctx)
    sub_targets["linker-maps"] = [DefaultInfo(default_output = linker_maps_directory)]

    # dSYMs from the binary and from deps are exposed together under one sub-target.
    bundle_debuggable_info = _get_debuggable_deps(ctx, binary_outputs)
    binary_dsym_artifacts = getattr(bundle_debuggable_info.binary_info, "dsyms", [])
    dep_dsym_artifacts = flatten([info.dsyms for info in bundle_debuggable_info.dep_infos])
    dsym_artifacts = binary_dsym_artifacts + dep_dsym_artifacts
    if dsym_artifacts:
        sub_targets[DSYM_SUBTARGET] = [DefaultInfo(default_outputs = dsym_artifacts)]

    # Merge the external debug info of the binary and all deps into one transitive set.
    external_debug_info = maybe_external_debug_info(
        actions = ctx.actions,
        label = ctx.label,
        children = [info.external_debug_info for info in bundle_debuggable_info.all_infos],
    )
    sub_targets[DEBUGINFO_SUBTARGET] = [
        DefaultInfo(
            other_outputs = project_external_debug_info(
                actions = ctx.actions,
                label = ctx.label,
                infos = [external_debug_info],
            ),
        ),
    ]

    dsym_info = get_apple_dsym_info(ctx, binary_dsyms = binary_dsym_artifacts, dep_dsyms = dep_dsym_artifacts)
    sub_targets[DSYM_INFO_SUBTARGET] = [
        DefaultInfo(default_output = dsym_info, other_outputs = dsym_artifacts),
    ]

    bundle = bundle_output(ctx)
    assemble_bundle(ctx, bundle, apple_bundle_part_list_output.parts, apple_bundle_part_list_output.info_plist_part)

    sub_targets[_PLIST] = [DefaultInfo(default_output = apple_bundle_part_list_output.info_plist_part.source)]

    sub_targets[_XCTOOLCHAIN_SUB_TARGET] = ctx.attrs._apple_xctoolchain.providers

    # Define the xcode data sub target
    xcode_data_default_info, xcode_data_info = generate_xcode_data(ctx, "apple_bundle", bundle, _xcode_populate_attributes, processed_info_plist = apple_bundle_part_list_output.info_plist_part.source)
    sub_targets[XCODE_DATA_SUB_TARGET] = xcode_data_default_info

    install_data = generate_install_data(ctx)

    # The run command points at the primary binary inside the assembled bundle.
    primary_binary_rel_path = get_apple_bundle_part_relative_destination_path(ctx, primary_binary_part)
    primary_binary_path = cmd_args([bundle, primary_binary_rel_path], delimiter = "/")
    run_cmd = cmd_args(primary_binary_path).hidden(bundle)

    return [
        DefaultInfo(default_output = bundle, sub_targets = sub_targets),
        AppleBundleInfo(
            bundle = bundle,
            binary_name = get_product_name(ctx),
            is_watchos = get_is_watch_bundle(ctx),
            contains_watchapp = is_any(lambda part: part.destination == AppleBundleDestination("watchapp"), apple_bundle_part_list_output.parts),
            skip_copying_swift_stdlib = ctx.attrs.skip_copying_swift_stdlib,
        ),
        AppleDebuggableInfo(dsyms = dsym_artifacts, external_debug_info = external_debug_info),
        InstallInfo(
            installer = ctx.attrs._apple_toolchain[AppleToolchainInfo].installer,
            files = {
                "app_bundle": bundle,
                "options": install_data,
            },
        ),
        RunInfo(args = run_cmd),
        linker_map_info,
        xcode_data_info,
    ]
def _xcode_populate_attributes(ctx, processed_info_plist: "artifact") -> {str.type: ""}:
    """Builds the Xcode project-generation data for an `apple_bundle` target."""
    xcode_data = {}
    xcode_data["deployment_version"] = get_bundle_min_target_version(ctx)
    xcode_data["info_plist"] = ctx.attrs.info_plist
    xcode_data["processed_info_plist"] = processed_info_plist
    xcode_data["product_name"] = get_product_name(ctx)
    xcode_data["sdk"] = get_apple_sdk_name(ctx)
    apple_xcode_data_add_xctoolchain(ctx, xcode_data)
    return xcode_data
def _linker_maps_data(ctx: "context") -> ("artifact", AppleBundleLinkerMapInfo.type):
    """Collects linker maps from the binary and all deps into one `LinkMap` directory."""
    deps_with_binary = ctx.attrs.deps + ([ctx.attrs.binary] if ctx.attrs.binary != None else [])

    all_maps = {}
    for dep in deps_with_binary:
        linker_map_info = dep.get(AppleBundleLinkerMapInfo)
        if linker_map_info == None:
            continue
        for linker_map in linker_map_info.linker_maps:
            # Keyed by basename; a later entry with the same basename replaces
            # an earlier one, matching dict-comprehension semantics.
            all_maps[linker_map.basename] = linker_map

    directory = ctx.actions.copied_dir(
        "LinkMap",
        all_maps,
    )
    return (directory, AppleBundleLinkerMapInfo(linker_maps = all_maps.values()))
def generate_install_data(
        ctx: "context",
        populate_rule_specific_attributes_func: ["function", None] = None,
        **kwargs) -> "artifact":
    """Writes the install-options JSON consumed by the installer."""
    install_data = {}
    install_data["fullyQualifiedName"] = ctx.label
    ## TODO(T110665037): populate full path similar to bundle_spec.json
    install_data["info_plist"] = ctx.attrs.info_plist
    ## TODO(T110665037): read from .buckconfig
    install_data["use_idb"] = "true"

    # We require the user to have run `xcode-select` and `/var/db/xcode_select_link` to symlink
    # to the selected Xcode. e.g: `/Applications/Xcode_14.2.app/Contents/Developer`
    install_data["xcode_developer_path"] = "/var/db/xcode_select_link"

    if populate_rule_specific_attributes_func != None:
        install_data.update(populate_rule_specific_attributes_func(ctx, **kwargs))

    return ctx.actions.write_json(_INSTALL_DATA_FILE_NAME, install_data)

View File

@ -0,0 +1,27 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
def _maybe_get_bool(config: str.type, default: [None, bool.type]) -> [None, bool.type]:
    """Reads `apple.<config>` as a bool (anything but "true" is False), or `default` when unset."""
    raw = read_config("apple", config, None)
    if raw != None:
        return raw.lower() == "true"
    return default
def apple_bundle_config() -> {str.type: ""}:
    """Implicit `apple_bundle()` attributes sourced from the `apple` buckconfig section."""
    return {
        # Opaque value injected into the bundling action's env to invalidate caching.
        "_bundling_cache_buster": read_config("apple", "bundling_cache_buster", None),
        "_bundling_log_file_enabled": _maybe_get_bool("bundling_log_file_enabled", False),
        # Overrides SDK-based code sign type detection.
        "_codesign_type": read_config("apple", "codesign_type_override", None),
        "_compile_resources_locally_override": _maybe_get_bool("compile_resources_locally_override", None),
        # When enabled, code signing is simulated with the dry-run tool instead of the real codesigner.
        "_dry_run_code_signing": _maybe_get_bool("dry_run_code_signing", False),
        # This is a kill switch for the feature, it can also be disabled by setting
        # `apple.fast_adhoc_signing_enabled=false` in a global buckconfig file.
        "_fast_adhoc_signing_enabled": _maybe_get_bool("fast_adhoc_signing_enabled", True),
        "_incremental_bundling_enabled": _maybe_get_bool("incremental_bundling_enabled", True),
        "_profile_bundling_enabled": _maybe_get_bool("profile_bundling_enabled", False),
        # `None` means "defer to the target-level attribute".
        "_use_entitlements_when_adhoc_code_signing": _maybe_get_bool("use_entitlements_when_adhoc_code_signing", None),
    }

View File

@ -0,0 +1,132 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//:paths.bzl", "paths")
# Abstraction of a place in a resulting bundle where file or directory will be copied. Actual value
# of path relative to bundle root depends on a platform. This class is an implementation detail and
# is not exposed to user unlike `AppleResourceDestination`.
# See `bundle_relative_path_for_destination` for how each value maps to a concrete path.
# v1 code is `com/facebook/buck/apple/AppleBundleDestination.java`
AppleBundleDestination = enum(
    "resources",
    "frameworks",
    "executables",
    "plugins",
    "xpcservices",
    "metadata",
    "watchapp",
    "headers",
    "modules",
    "quicklook",
    "watchkitstub",
    "bundleroot",
    "loginitems",
)
# Maps every `AppleBundleDestination` value to its path relative to the bundle
# root for one platform flavor. An empty string means the bundle root itself.
AppleBundleDestinationPaths = record(
    resources = field(str.type, ""),
    frameworks = field(str.type, ""),
    executables = field(str.type, ""),
    plugins = field(str.type, ""),
    xpcservices = field(str.type, ""),
    metadata = field(str.type, ""),
    watchapp = field(str.type, ""),
    headers = field(str.type, ""),
    modules = field(str.type, ""),
    quicklook = field(str.type, ""),
    watchkitstub = field(str.type, ""),
    bundleroot = field(str.type, ""),
    loginitems = field(str.type, ""),
)
# Layout for app bundles on non-macOS SDKs (content sits at the bundle root).
_IOSBundleDestinationPaths = AppleBundleDestinationPaths(
    frameworks = "Frameworks",
    plugins = "PlugIns",
    xpcservices = "XPCServices",
    watchapp = "Watch",
    quicklook = "Library/QuickLook",
    watchkitstub = "_WatchKitStub",
)

# Layout for `.framework` bundles on non-macOS SDKs.
_IOSFrameworkBundleDestinationPaths = AppleBundleDestinationPaths(
    frameworks = "Frameworks",
    xpcservices = "XPCServices",
    headers = "Headers",
    modules = "Modules",
)

# macOS bundles nest their content under a `Contents` directory.
macOS_content_path = "Contents"

# Layout for app bundles on macOS/Catalyst SDKs.
_MacOSBundleDestinationPaths = AppleBundleDestinationPaths(
    resources = paths.join(macOS_content_path, "Resources"),
    frameworks = paths.join(macOS_content_path, "Frameworks"),
    executables = paths.join(macOS_content_path, "MacOS"),
    plugins = paths.join(macOS_content_path, "PlugIns"),
    xpcservices = paths.join(macOS_content_path, "XPCServices"),
    metadata = macOS_content_path,
    watchapp = macOS_content_path,
    headers = macOS_content_path,
    modules = macOS_content_path,
    quicklook = paths.join(macOS_content_path, "Library/QuickLook"),
    watchkitstub = macOS_content_path,
    bundleroot = macOS_content_path,
    loginitems = paths.join(macOS_content_path, "Library/LoginItems"),
)

# Layout for `.framework` bundles on macOS/Catalyst SDKs.
_MacOSFrameworkBundleDestinationPaths = AppleBundleDestinationPaths(
    resources = "Resources",
    frameworks = "Frameworks",
    xpcservices = "XPCServices",
    metadata = "Resources",
    headers = "Headers",
    modules = "Modules",
)
def _get_apple_bundle_destinations_for_sdk_name(name: str.type) -> AppleBundleDestinationPaths.type:
    """App-bundle layout for the given SDK: macOS-style (`Contents/...`) or flat."""
    return _MacOSBundleDestinationPaths if name in ["macosx", "maccatalyst"] else _IOSBundleDestinationPaths
def _get_apple_framework_bundle_destinations_for_sdk_name(name: str.type) -> AppleBundleDestinationPaths.type:
    """Framework-bundle layout for the given SDK."""
    return _MacOSFrameworkBundleDestinationPaths if name in ["macosx", "maccatalyst"] else _IOSFrameworkBundleDestinationPaths
def bundle_relative_path_for_destination(destination: AppleBundleDestination.type, sdk_name: str.type, extension: str.type) -> str.type:
    """Resolves a logical bundle destination to its path relative to the bundle root."""
    if extension == "framework":
        bundle_destinations = _get_apple_framework_bundle_destinations_for_sdk_name(sdk_name)
    else:
        bundle_destinations = _get_apple_bundle_destinations_for_sdk_name(sdk_name)

    # Table-driven lookup instead of a long `if`/`elif` chain. Every field is a
    # plain string, so eagerly evaluating all entries is cheap.
    path_by_destination = {
        "bundleroot": bundle_destinations.bundleroot,
        "executables": bundle_destinations.executables,
        "frameworks": bundle_destinations.frameworks,
        "headers": bundle_destinations.headers,
        "loginitems": bundle_destinations.loginitems,
        "metadata": bundle_destinations.metadata,
        "modules": bundle_destinations.modules,
        "plugins": bundle_destinations.plugins,
        "quicklook": bundle_destinations.quicklook,
        "resources": bundle_destinations.resources,
        "watchapp": bundle_destinations.watchapp,
        "watchkitstub": bundle_destinations.watchkitstub,
        "xpcservices": bundle_destinations.xpcservices,
    }
    if destination.value not in path_by_destination:
        fail("Unsupported Apple bundle destination {}".format(destination))
    return path_by_destination[destination.value]

View File

@ -0,0 +1,90 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load(":apple_bundle_config.bzl", "apple_bundle_config")
load(":apple_info_plist_substitutions_parsing.bzl", "parse_codesign_entitlements")
# Attributes forwarded verbatim from `apple_bundle()` to the private
# `apple_resource_bundle()` target created by `apple_bundle_macro_impl`.
_RESOURCE_BUNDLE_FIELDS = [
    "asset_catalogs_compilation_options",
    "binary",
    "default_target_platform",
    "deps",
    "extension",
    "ibtool_flags",
    "ibtool_module_flag",
    "info_plist",
    "info_plist_substitutions",
    "product_name",
    "resource_group",
    "resource_group_map",
]
def apple_bundle_macro_impl(apple_bundle_rule = None, apple_resource_bundle_rule = None, **kwargs):
    """Macro wrapper for `apple_bundle()`.

    Injects buckconfig-derived implicit attributes (via `apple_bundle_config`)
    and, unless disabled through `apple.resources_toolchain_enabled`, creates a
    companion private `apple_resource_bundle()` target that compiles the
    bundle's resources on a separate exec platform (see diagram below).
    """
    info_plist_substitutions = kwargs.get("info_plist_substitutions")
    kwargs.update(apple_bundle_config())

    resource_bundle_target_name = None

    # The `apple_resource_bundle()` target will _always_ be Xcode-based, so resources can always be used
    # from there. `resources_toolchain_enabled` exists only as a killswitch (or for testing/debugging purposes).
    # By default, we consistently get all resources from `apple_resource_bundle()` target across all OSes and
    # toolchains.
    resources_toolchain_enabled = (read_config("apple", "resources_toolchain_enabled", "true").lower() == "true")
    if resources_toolchain_enabled:
        resource_bundle_name = kwargs["name"] + "__ResourceBundle_Private"
        resource_bundle_kwargs = {
            "_bundle_target_name": kwargs["name"],
            "_compile_resources_locally_override": kwargs["_compile_resources_locally_override"],
        }
        for field_name in _RESOURCE_BUNDLE_FIELDS:
            resource_bundle_kwargs[field_name] = kwargs.get(field_name)

        # TODO(T125269558): Remove usage of apple_resource_bundle() once we have exec groups.
        apple_resource_bundle_rule(
            name = resource_bundle_name,
            **resource_bundle_kwargs
        )

        resource_bundle_target_name = ":{}".format(resource_bundle_name)

        # Splitting the resource compilation into another rule means we can have
        # different exec platforms for the resource compilation and for the rest
        # of the bundling process. This allows us to send resource compilations
        # directly to RE.
        #
        #                                +-------------------------------------------------+
        #                                |                 apple_bundle()                  |
        #                                |          Exec Platform: macOS/Linux             |
        #                                |  +--------+  +--------+  +------------------+   |
        #                                +---+ binary +--+  deps  +-+ _resource_bundle +---+
        #                                   +--------+  +--------+  +------------------+
        #                                       |            |               |
        #                                       |            |               |
        #                                       |            |     +---------------+
        #                                       |            |     |
        #                                       |            |     |
        #                                       |            |     v
        #                                       |            |  +---------------------------------+
        #                                       |    +-----+ |  |     apple_resource_bundle()     |
        #                                       |    |       |  |    Exec Platform: macOS-only    |
        #                                       |    |       |  |  +--------+  +--------+         |
        #                                       |    |       +-----+ binary +--+  deps  +---------+
        #                                       |    |          +--------+  +--------+
        #                                       |    |              |            |
        #                                       |    |              |            |
        #                                       |    v              |            |
        #                                       | +-------------------+          |
        #                                       | |   Dependencies    |<---------+-----------+
        #                                       | +-------------------+          |
        #                                       | +-------------------+          |
        #                                       +------>|      Binary       |<---------+
        #                                         +-------------------+

    apple_bundle_rule(
        _codesign_entitlements = parse_codesign_entitlements(info_plist_substitutions),
        _resource_bundle = resource_bundle_target_name,
        **kwargs
    )

View File

@ -0,0 +1,212 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//:paths.bzl", "paths")
load("@prelude//utils:utils.bzl", "expect")
load(":apple_bundle_destination.bzl", "AppleBundleDestination", "bundle_relative_path_for_destination")
load(":apple_bundle_utility.bzl", "get_extension_attr", "get_product_name")
load(":apple_code_signing_types.bzl", "AppleEntitlementsInfo", "CodeSignType")
load(":apple_sdk.bzl", "get_apple_sdk_name")
load(":apple_sdk_metadata.bzl", "get_apple_sdk_metadata_for_sdk_name")
load(":apple_toolchain_types.bzl", "AppleToolchainInfo", "AppleToolsInfo")
# Defines where and what should be copied into the bundle
AppleBundlePart = record(
    # A file or directory whose content should be copied
    source = field("artifact"),
    # Where the source should be copied, the actual destination directory
    # inside bundle depends on target platform
    destination = AppleBundleDestination.type,
    # New file name if it should be renamed before copying.
    # Empty string value is applicable only when `source` is a directory,
    # in such case only content of the directory will be copied, as opposed to the directory itself.
    # When value is `None`, directory or file will be copied as it is, without renaming.
    new_name = field([str.type, None], None),
    # Marks parts which should be code signed separately from the whole bundle.
    codesign_on_copy = field(bool.type, False),
)
def bundle_output(ctx: "context") -> "artifact":
    """Declares the output directory artifact for the bundle (e.g. `App.app`)."""
    return ctx.actions.declare_output(get_bundle_dir_name(ctx))
def assemble_bundle(ctx: "context", bundle: "artifact", parts: [AppleBundlePart.type], info_plist_part: [AppleBundlePart.type, None]) -> None:
    """Registers the action that assembles (and optionally code signs) the bundle.

    Copies every part into `bundle` according to a generated spec file; depending
    on the detected code sign type, passes signing flags to the assemble tool.
    The action may run incrementally and can emit a profile and a log file.
    """

    # BUGFIX: the original expression `parts + [info_plist_part] if info_plist_part else []`
    # parsed as `(parts + [info_plist_part]) if info_plist_part else []`, which silently
    # dropped ALL parts whenever `info_plist_part` was `None` (a value its declared type
    # allows). The parentheses make the conditional apply only to the plist part.
    all_parts = parts + ([info_plist_part] if info_plist_part else [])
    spec_file = _bundle_spec_json(ctx, all_parts)
    tools = ctx.attrs._apple_tools[AppleToolsInfo]
    tool = tools.assemble_bundle

    codesign_args = []
    codesign_type = _detect_codesign_type(ctx)
    codesign_tool = ctx.attrs._apple_toolchain[AppleToolchainInfo].codesign

    # Signing configuration: dry-run takes precedence over fast ad-hoc.
    if ctx.attrs._dry_run_code_signing:
        codesign_configuration_args = ["--codesign-configuration", "dry-run"]
        codesign_tool = tools.dry_codesign_tool
    elif ctx.attrs._fast_adhoc_signing_enabled:
        codesign_configuration_args = ["--codesign-configuration", "fast-adhoc"]
    else:
        codesign_configuration_args = []

    if codesign_type.value in ["distribution", "adhoc"]:
        codesign_args = [
            "--codesign",
            "--codesign-tool",
            codesign_tool,
        ]

        external_name = get_apple_sdk_name(ctx)
        platform_args = ["--platform", external_name]
        codesign_args.extend(platform_args)

        if codesign_type.value != "adhoc":
            # Distribution signing additionally needs provisioning profiles and,
            # when available, a command that lists signing identities.
            provisioning_profiles = ctx.attrs._provisioning_profiles[DefaultInfo]
            expect(
                len(provisioning_profiles.default_outputs) == 1,
                "expected exactly one default output from provisioning profile",
            )
            provisioning_profiles_args = ["--profiles-dir"] + provisioning_profiles.default_outputs
            codesign_args.extend(provisioning_profiles_args)

            identities_command = ctx.attrs._apple_toolchain[AppleToolchainInfo].codesign_identities_command
            identities_command_args = ["--codesign-identities-command", cmd_args(identities_command)] if identities_command else []
            codesign_args.extend(identities_command_args)
        else:
            codesign_args.append("--ad-hoc")

        codesign_args += _get_entitlements_codesign_args(ctx, codesign_type)

        info_plist_args = [
            "--info-plist-source",
            info_plist_part.source,
            "--info-plist-destination",
            get_apple_bundle_part_relative_destination_path(ctx, info_plist_part),
        ] if info_plist_part else []
        codesign_args.extend(info_plist_args)
    elif codesign_type.value == "skip":
        pass
    else:
        fail("Code sign type `{}` not supported".format(codesign_type))

    command = cmd_args([
        tool,
        "--output",
        bundle.as_output(),
        "--spec",
        spec_file,
    ] + codesign_args)
    command.hidden([part.source for part in all_parts])
    run_incremental_args = {}
    incremental_state = ctx.actions.declare_output("incremental_state.json").as_output()

    # Fallback to value from buckconfig
    incremental_bundling_enabled = ctx.attrs.incremental_bundling_enabled or ctx.attrs._incremental_bundling_enabled

    if incremental_bundling_enabled:
        command.add("--incremental-state", incremental_state)
        run_incremental_args = {
            "metadata_env_var": "ACTION_METADATA",
            "metadata_path": "action_metadata.json",
            "no_outputs_cleanup": True,
        }
        category = "apple_assemble_bundle_incremental"
    else:
        # overwrite file with incremental state so if previous and next builds are incremental
        # (as opposed to the current non-incremental one), next one won't assume there is a
        # valid incremental state.
        command.hidden(ctx.actions.write_json(incremental_state, {}))
        category = "apple_assemble_bundle"

    if ctx.attrs._profile_bundling_enabled:
        profile_output = ctx.actions.declare_output("bundling_profile.txt").as_output()
        command.add("--profile-output", profile_output)

    if ctx.attrs._bundling_log_file_enabled:
        bundling_log_output = ctx.actions.declare_output("bundling_log.txt").as_output()
        command.add("--log-file", bundling_log_output)

    command.add(codesign_configuration_args)

    env = {}
    cache_buster = ctx.attrs._bundling_cache_buster
    if cache_buster:
        env["BUCK2_BUNDLING_CACHE_BUSTER"] = cache_buster

    # Any signing forces the action to run locally — presumably because the
    # codesign tool needs local credentials; TODO confirm the exact reason.
    force_local_bundling = codesign_type.value != "skip"
    ctx.actions.run(
        command,
        local_only = force_local_bundling,
        prefer_local = not force_local_bundling,
        category = category,
        env = env,
        **run_incremental_args
    )
def get_bundle_dir_name(ctx: "context") -> str.type:
    """Bundle directory name: `<product_name>.<extension>`."""
    product_name = get_product_name(ctx)
    extension = get_extension_attr(ctx)
    return paths.replace_extension(product_name, "." + extension)
def get_apple_bundle_part_relative_destination_path(ctx: "context", part: AppleBundlePart.type) -> str.type:
    """Path of `part` relative to the bundle root, honoring platform layout and renames."""
    base_dir = bundle_relative_path_for_destination(part.destination, get_apple_sdk_name(ctx), ctx.attrs.extension)
    if part.new_name != None:
        file_name = part.new_name
    else:
        file_name = paths.basename(part.source.short_path)
    return paths.join(base_dir, file_name)
def _bundle_part_spec_entry(ctx: "context", part: AppleBundlePart.type) -> {str.type: ""}:
    # One entry of the spec consumed by the bundle assembling tool.
    entry = {
        "dst": get_apple_bundle_part_relative_destination_path(ctx, part),
        "src": part.source,
    }
    if part.codesign_on_copy:
        entry["codesign_on_copy"] = True
    return entry

# Returns JSON to be passed into bundle assembling tool. It should contain a dictionary which maps bundle relative destination paths to source paths.
def _bundle_spec_json(ctx: "context", parts: [AppleBundlePart.type]) -> "artifact":
    specs = [_bundle_part_spec_entry(ctx, part) for part in parts]
    return ctx.actions.write_json("bundle_spec.json", specs)
def _detect_codesign_type(ctx: "context") -> CodeSignType.type:
    """Determines how the bundle should be signed: "skip", "adhoc" or "distribution"."""

    # Only code sign application bundles and extensions
    if ctx.attrs.extension not in ["app", "appex"]:
        return CodeSignType("skip")

    # An explicit buckconfig override wins over SDK-based detection.
    explicit_codesign_type = ctx.attrs._codesign_type
    if explicit_codesign_type:
        return CodeSignType(explicit_codesign_type)

    sdk_metadata = get_apple_sdk_metadata_for_sdk_name(get_apple_sdk_name(ctx))
    if sdk_metadata.is_ad_hoc_code_sign_sufficient:
        return CodeSignType("adhoc")
    return CodeSignType("distribution")
def _entitlements_file(ctx: "context") -> ["artifact", None]:
    """Entitlements for signing: the binary's own, else the target's `_codesign_entitlements`."""
    if not ctx.attrs.binary:
        return None

    # The `binary` attribute can be either an apple_binary or a dynamic library from apple_library
    binary_info = ctx.attrs.binary[AppleEntitlementsInfo]
    if binary_info and binary_info.entitlements_file:
        return binary_info.entitlements_file
    return ctx.attrs._codesign_entitlements
def _should_include_entitlements(ctx: "context", codesign_type: CodeSignType.type) -> bool.type:
    """Whether entitlements should be passed to the signing step for this sign type."""
    sign_type = codesign_type.value
    if sign_type == "distribution":
        return True

    if sign_type == "adhoc":
        # The config-based override value takes priority over target value
        config_override = ctx.attrs._use_entitlements_when_adhoc_code_signing
        if config_override != None:
            return config_override
        return ctx.attrs.use_entitlements_when_adhoc_code_signing

    return False
def _get_entitlements_codesign_args(ctx: "context", codesign_type: CodeSignType.type) -> ["_arglike"]:
    """`--entitlements <file>` args, or empty when entitlements are absent or unwanted."""
    if not _should_include_entitlements(ctx, codesign_type):
        return []
    entitlements = _entitlements_file(ctx)
    if not entitlements:
        return []
    return ["--entitlements", entitlements]

View File

@ -0,0 +1,358 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//:paths.bzl", "paths")
load("@prelude//:resources.bzl", "gather_resources")
load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo")
load("@prelude//utils:utils.bzl", "expect", "flatten_dict")
load(
":apple_asset_catalog.bzl",
"compile_apple_asset_catalog",
)
load(
":apple_asset_catalog_types.bzl",
"AppleAssetCatalogSpec", # @unused Used as a type
)
load(":apple_bundle_destination.bzl", "AppleBundleDestination")
load(":apple_bundle_part.bzl", "AppleBundlePart")
load(":apple_bundle_types.bzl", "AppleBundleInfo")
load(":apple_bundle_utility.bzl", "get_bundle_resource_processing_options", "get_extension_attr", "get_product_name")
load(":apple_core_data.bzl", "compile_apple_core_data")
load(
":apple_core_data_types.bzl",
"AppleCoreDataSpec", # @unused Used as a type
)
load(":apple_info_plist.bzl", "process_info_plist", "process_plist")
load(
":apple_resource_types.bzl",
"AppleResourceDestination",
"AppleResourceSpec", # @unused Used as a type
)
load(":apple_resource_utility.bzl", "apple_bundle_destination_from_resource_destination")
load(
":resource_groups.bzl",
"create_resource_graph",
"get_filtered_resources",
"get_resource_graph_node_map_func",
"get_resource_group_info",
)
# Result of computing all resource (non-binary) parts of a bundle.
AppleBundleResourcePartListOutput = record(
    # Resource parts to be copied into an Apple bundle, *excluding* binaries
    resource_parts = field([AppleBundlePart.type]),
    # Part that holds the info.plist
    info_plist_part = field(AppleBundlePart.type),
)
def get_apple_bundle_resource_part_list(ctx: "context") -> AppleBundleResourcePartListOutput.type:
    # Collects every non-binary part of the bundle: PkgInfo, selected
    # resources (including inlined C++ resources), compiled asset catalogs,
    # compiled Core Data models, copied resource files/dirs and nested
    # first-level bundles, plus the processed Info.plist.
    parts = []

    parts.extend(_create_pkg_info_if_needed(ctx))

    (resource_specs, asset_catalog_specs, core_data_specs) = _select_resources(ctx)

    # If we've pulled in native/C++ resources from deps, inline them into the
    # bundle under the `CxxResources` namespace.
    cxx_resources = flatten_dict(gather_resources(
        label = ctx.label,
        deps = ctx.attrs.deps,
    ).values())
    if cxx_resources:
        cxx_res_dir = ctx.actions.copied_dir(
            "CxxResources",
            {
                name: resource
                for name, (resource, _) in cxx_resources.items()
            },
        )
        resource_specs.append(
            AppleResourceSpec(
                dirs = [cxx_res_dir],
                destination = AppleResourceDestination("resources"),
            ),
        )

    asset_catalog_result = compile_apple_asset_catalog(ctx, asset_catalog_specs)
    if asset_catalog_result != None:
        asset_catalog_part = AppleBundlePart(
            source = asset_catalog_result.compiled_catalog,
            destination = AppleBundleDestination("resources"),
            # We are only interested in the directory contents
            new_name = "",
        )
        parts.append(asset_catalog_part)

    # Asset catalog compilation may produce an extra plist to merge into Info.plist.
    extra_plist = asset_catalog_result.catalog_plist if asset_catalog_result != None else None
    info_plist_part = process_info_plist(ctx = ctx, override_input = extra_plist)

    core_data_result = compile_apple_core_data(ctx, core_data_specs, get_product_name(ctx))
    if core_data_result != None:
        core_data_part = AppleBundlePart(
            source = core_data_result,
            destination = AppleBundleDestination("resources"),
            # We are only interested in the directory contents
            new_name = "",
        )
        parts.append(core_data_part)

    parts.extend(_copy_resources(ctx, resource_specs))
    parts.extend(_copy_first_level_bundles(ctx))

    return AppleBundleResourcePartListOutput(
        resource_parts = parts,
        info_plist_part = info_plist_part,
    )
# Same logic as in v1, see `buck_client/src/com/facebook/buck/apple/ApplePkgInfo.java`
def _create_pkg_info_if_needed(ctx: "context") -> [AppleBundlePart.type]:
    # XPC services and QuickLook generators do not carry a PkgInfo file.
    if get_extension_attr(ctx) in ("xpc", "qlgenerator"):
        return []
    pkg_info = ctx.actions.write("PkgInfo", "APPLWRUN\n")
    return [AppleBundlePart(source = pkg_info, destination = AppleBundleDestination("metadata"))]
def _select_resources(ctx: "context") -> (([AppleResourceSpec.type], [AppleAssetCatalogSpec.type], [AppleCoreDataSpec.type])):
    # Filters the transitive resource graph down to the resources belonging
    # to this bundle, honoring any configured resource groups.
    resource_group_info = get_resource_group_info(ctx)
    if resource_group_info:
        # Roots of resource-group mappings must be included in the traversed graph.
        resource_groups_deps = [mapping.root.node for group in resource_group_info.groups for mapping in group.mappings if mapping.root != None]
        resource_group_mappings = resource_group_info.mappings
    else:
        resource_groups_deps = []
        resource_group_mappings = {}

    # The bundle's binary (when present) also contributes resources.
    deps = ctx.attrs.deps + filter(None, [ctx.attrs.binary])
    resource_graph = create_resource_graph(
        ctx = ctx,
        labels = [],
        deps = deps + resource_groups_deps,
        exported_deps = [],
    )
    resource_graph_node_map_func = get_resource_graph_node_map_func(resource_graph)
    return get_filtered_resources(ctx.label, resource_graph_node_map_func, ctx.attrs.resource_group, resource_group_mappings)
def _copy_resources(ctx: "context", specs: [AppleResourceSpec.type]) -> [AppleBundlePart.type]:
    # For each spec: individual files first, then whole directories,
    # contents-only directories, and finally locale-variant files.
    parts = []
    for spec in specs:
        destination = apple_bundle_destination_from_resource_destination(spec.destination)
        for file in spec.files:
            parts.append(_process_apple_resource_file_if_needed(
                ctx = ctx,
                file = _extract_single_artifact(file),
                destination = destination,
                destination_relative_path = None,
                codesign_on_copy = spec.codesign_files_on_copy,
            ))
        parts.extend(_bundle_parts_for_dirs(spec.dirs, destination, False))
        parts.extend(_bundle_parts_for_dirs(spec.content_dirs, destination, True))
        parts.extend(_bundle_parts_for_variant_files(ctx, spec))
    return parts
def _extract_single_artifact(x: ["dependency", "artifact"]) -> "artifact":
    # Plain artifacts pass straight through.
    if type(x) == "artifact":
        return x

    # Otherwise this is a dependency: the resource is the sole default
    # output of its `DefaultInfo` provider.
    outputs = x[DefaultInfo].default_outputs
    expect(
        len(outputs) == 1,
        "expected exactly one default output from {} ({})".format(x, outputs),
    )
    return outputs[0]
def _copy_first_level_bundles(ctx: "context") -> [AppleBundlePart.type]:
    # Direct deps that expose `AppleBundleInfo` are nested bundles which get
    # copied into this bundle at the appropriate destination.
    parts = []
    for dep in ctx.attrs.deps:
        bundle_info = dep.get(AppleBundleInfo)
        if bundle_info != None:
            part = _copied_bundle_spec(bundle_info)
            if part != None:
                parts.append(part)
    return parts
def _copied_bundle_spec(bundle_info: AppleBundleInfo.type) -> [None, AppleBundlePart.type]:
    # Route the nested bundle to the right destination based on its extension.
    bundle = bundle_info.bundle
    extension = paths.split_extension(bundle.short_path)[1]

    if extension == ".framework":
        return AppleBundlePart(
            source = bundle,
            destination = AppleBundleDestination("frameworks"),
            codesign_on_copy = True,
        )

    if extension == ".app":
        # Watch applications live in a dedicated directory; other nested apps go to plugins.
        expect(bundle_info.is_watchos != None, "Field should be set for bundles with extension {}".format(extension))
        return AppleBundlePart(
            source = bundle,
            destination = AppleBundleDestination("watchapp" if bundle_info.is_watchos else "plugins"),
            codesign_on_copy = False,
        )

    if extension == ".appex":
        return AppleBundlePart(
            source = bundle,
            destination = AppleBundleDestination("plugins"),
            codesign_on_copy = False,
        )

    fail("Extension `{}` is not yet supported.".format(extension))
def _bundle_parts_for_dirs(generated_dirs: ["artifact"], destination: AppleBundleDestination.type, copy_contents_only: bool.type) -> [AppleBundlePart.type]:
    # Empty `new_name` means "copy the directory's contents"; `None` means
    # "copy the directory itself under its own name".
    new_name = "" if copy_contents_only else None
    parts = []
    for generated_dir in generated_dirs:
        parts.append(AppleBundlePart(
            source = generated_dir,
            destination = destination,
            new_name = new_name,
        ))
    return parts
def _bundle_parts_for_variant_files(ctx: "context", spec: AppleResourceSpec.type) -> [AppleBundlePart.type]:
    # By definition, all variant files go into the resources destination.
    destination = AppleBundleDestination("resources")
    parts = []

    # Unnamed variants keep their `<locale>.lproj/<file>` relative path.
    for variant_file in spec.variant_files:
        parts.append(_process_apple_resource_file_if_needed(
            ctx = ctx,
            file = variant_file,
            destination = destination,
            destination_relative_path = _get_dest_subpath_for_variant_file(variant_file),
        ))

    # Named variants are keyed by locale; the key provides the `.lproj` directory.
    for locale, variant_files in spec.named_variant_files.items():
        if not locale.endswith(".lproj"):
            fail("Keys for named variant files have to end with '.lproj' suffix, got {}".format(locale))
        for variant_file in variant_files:
            parts.append(_process_apple_resource_file_if_needed(
                ctx = ctx,
                file = variant_file,
                destination = destination,
                destination_relative_path = paths.join(locale, paths.basename(variant_file.short_path)),
            ))

    return parts
def _run_ibtool(
        ctx: "context",
        raw_file: "artifact",
        output: "output_artifact",
        action_flags: [str.type],
        target_device: [None, str.type],
        action_identifier: str.type,
        output_is_dir: bool.type) -> None:
    # Runs `ibtool` over `raw_file`, writing to `output`. `action_flags`
    # selects the operation (e.g. --compile / --link). Directory outputs are
    # produced in $TMPDIR first and copied back (see workaround note below).
    # TODO(T110378103): detect and add minimum deployment target automatically
    # TODO(T110378113): add support for ibtool modules (turned on by `ibtool_module_flag` field of `apple_bundle` rule)

    # Equivalent of `AppleProcessResources::BASE_IBTOOL_FLAGS` from v1
    base_flags = ["--output-format", "human-readable-text", "--notices", "--warnings", "--errors"]
    ibtool = ctx.attrs._apple_toolchain[AppleToolchainInfo].ibtool
    # Rule-level extra flags, when the rule declares them.
    ibtool_flags = getattr(ctx.attrs, "ibtool_flags", None) or []
    ibtool_command = [ibtool] + base_flags + ibtool_flags
    if target_device != None:
        ibtool_command.extend(["--target-device", target_device])
    ibtool_command.extend(action_flags)
    if output_is_dir:
        # The shell wrapper below redirects directory output into $TMPDIR.
        ibtool_command.append('"$TMPDIR"')
    else:
        ibtool_command.append(output)
    ibtool_command.append(raw_file)

    if output_is_dir:
        # Sandboxing and fs isolation on RE machines results in Xcode tools failing
        # when those are working in freshly created directories in buck-out.
        # See https://fb.workplace.com/groups/1042353022615812/permalink/1872164996301273/
        # As a workaround create a directory in tmp, use it for Xcode tools, then
        # copy the result to buck-out.
        wrapper_script, _ = ctx.actions.write(
            "ibtool_wrapper.sh",
            [
                cmd_args('export TMPDIR="$(mktemp -d)"'),
                cmd_args(cmd_args(ibtool_command), delimiter = " "),
                cmd_args(output, format = 'mkdir -p {} && cp -r "$TMPDIR"/ {}'),
            ],
            allow_args = True,
        )
        # The real command and output must be declared as hidden inputs/outputs
        # since only the wrapper script appears on the command line.
        command = cmd_args(["/bin/sh", wrapper_script]).hidden([ibtool_command, output])
    else:
        command = ibtool_command

    processing_options = get_bundle_resource_processing_options(ctx)
    ctx.actions.run(command, prefer_local = processing_options.prefer_local, allow_cache_upload = processing_options.allow_cache_upload, category = "apple_ibtool", identifier = action_identifier)
def _compile_ui_resource(
        ctx: "context",
        raw_file: "artifact",
        output: "output_artifact",
        target_device: [None, str.type] = None,
        output_is_dir: bool.type = False) -> None:
    # Compile a UI resource (.storyboard / .xib) via `ibtool --compile`.
    _run_ibtool(
        ctx = ctx,
        raw_file = raw_file,
        output = output,
        action_flags = ["--compile"],
        action_identifier = "compile_{}".format(raw_file.basename),
        target_device = target_device,
        output_is_dir = output_is_dir,
    )
def _link_ui_resource(
        ctx: "context",
        raw_file: "artifact",
        output: "output_artifact",
        target_device: [None, str.type] = None,
        output_is_dir: bool.type = False) -> None:
    # Link an already-compiled UI resource via `ibtool --link`.
    _run_ibtool(
        ctx = ctx,
        raw_file = raw_file,
        output = output,
        action_flags = ["--link"],
        action_identifier = "link_{}".format(raw_file.basename),
        target_device = target_device,
        output_is_dir = output_is_dir,
    )
def _process_apple_resource_file_if_needed(
        ctx: "context",
        file: "artifact",
        destination: AppleBundleDestination.type,
        destination_relative_path: [str.type, None],
        codesign_on_copy: bool.type = False) -> AppleBundlePart.type:
    # Compiles resource files that need preprocessing (plists/stringsdicts,
    # storyboards, xibs) and returns a bundle part for the (possibly
    # processed) artifact; all other files are copied verbatim.
    output_dir = "_ProcessedResources"
    basename = paths.basename(file.short_path)
    output_is_contents_dir = False
    if basename.endswith(".plist") or basename.endswith(".stringsdict"):
        # Property lists get run through the plist processor.
        processed = ctx.actions.declare_output(paths.join(output_dir, file.short_path))
        process_plist(
            ctx = ctx,
            input = file,
            output = processed.as_output(),
            action_id = destination_relative_path,
        )
    elif basename.endswith(".storyboard"):
        compiled = ctx.actions.declare_output(paths.join(output_dir, paths.replace_extension(file.short_path, ".storyboardc")), dir = True)
        if get_is_watch_bundle(ctx):
            # Watch storyboards additionally require a link step; the linked
            # output directory is then copied by contents.
            output_is_contents_dir = True
            _compile_ui_resource(ctx = ctx, raw_file = file, output = compiled.as_output(), target_device = "watch")
            processed = ctx.actions.declare_output(paths.join(output_dir, paths.replace_extension(file.short_path, "_linked_storyboard")), dir = True)
            _link_ui_resource(ctx = ctx, raw_file = compiled, output = processed.as_output(), target_device = "watch", output_is_dir = True)
        else:
            processed = compiled
            _compile_ui_resource(ctx, file, processed.as_output())
    elif basename.endswith(".xib"):
        processed = ctx.actions.declare_output(paths.join(output_dir, paths.replace_extension(file.short_path, ".nib")))
        _compile_ui_resource(ctx, file, processed.as_output())
    else:
        # No processing required; copy the file as-is.
        processed = file

    # When name is empty string only content of the directory will be copied, as opposed to the directory itself.
    # When name is `None`, directory or file will be copied as it is, without renaming.
    new_name = destination_relative_path if destination_relative_path else ("" if output_is_contents_dir else None)
    return AppleBundlePart(source = processed, destination = destination, new_name = new_name, codesign_on_copy = codesign_on_copy)
# Returns a path relative to the _parent_ of the lproj dir.
# For example, given a variant file with a short path of `XX/YY.lproj/ZZ`
# it would return `YY.lproj/ZZ`.
def _get_dest_subpath_for_variant_file(variant_file: "artifact") -> str.type:
    dir_name = paths.basename(paths.dirname(variant_file.short_path))

    # Require the full ".lproj" suffix (dot included) to match the
    # named-variant validation above and this function's own error message;
    # the previous check accepted any name merely ending in "lproj".
    if not dir_name.endswith(".lproj"):
        fail("Variant files have to be in a directory with name ending in '.lproj' but `{}` was not.".format(variant_file.short_path))
    file_name = paths.basename(variant_file.short_path)
    return paths.join(dir_name, file_name)
def get_is_watch_bundle(ctx: "context") -> bool.type:
    # Only the watchOS toolchain carries a WatchKit stub binary, so its
    # presence identifies a watch bundle.
    toolchain = ctx.attrs._apple_toolchain[AppleToolchainInfo]
    return toolchain.watch_kit_stub_binary != None

View File

@ -0,0 +1,43 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
# Provider flagging that the result of the rule contains an Apple bundle.
# It might be copied into the main bundle at the appropriate place if a rule
# with this provider is a dependency of `apple_bundle`.
AppleBundleInfo = provider(fields = [
    # Result bundle; `artifact`
    "bundle",
    # The name of the executable within the bundle.
    # `str.type`
    "binary_name",
    # If the bundle was built for the watchOS Apple platform, this affects packaging.
    # Might be omitted for certain types of bundle (e.g. frameworks) when packaging doesn't depend on it.
    # [None, `bool.type`]
    "is_watchos",
    # If the bundle contains a Watch Extension executable, we have to update the packaging.
    # Similar to `is_watchos`, this might be omitted for certain types of bundles which don't depend on it.
    # [None, `bool.type`]
    "contains_watchapp",
    # By default, non-framework, non-appex binaries copy Swift libraries into the final
    # binary. This is the opt-out for that.
    # [None, `bool.type`]
    "skip_copying_swift_stdlib",
])

# Provider which helps to propagate the minimum deployment version up the target graph.
AppleMinDeploymentVersionInfo = provider(fields = [
    # `str.type`
    "version",
])

# Carries the precomputed resource part list for a bundle.
AppleBundleResourceInfo = provider(fields = [
    "resource_output",  # AppleBundleResourcePartListOutput.type
])

# Linker maps produced while building the bundle's binaries.
AppleBundleLinkerMapInfo = provider(fields = [
    "linker_maps",  # ["artifact"]
])

View File

@ -0,0 +1,85 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//utils:utils.bzl", "flatten", "value_or")
load(":apple_bundle_types.bzl", "AppleBundleLinkerMapInfo", "AppleMinDeploymentVersionInfo")
load(":apple_resource_types.bzl", "AppleResourceProcessingOptions")
load(":apple_target_sdk_version.bzl", "get_min_deployment_version_for_node")
load(":apple_toolchain_types.bzl", "AppleToolchainInfo")
load(":resource_groups.bzl", "ResourceGraph")
# `ctx` in all functions below is expected to be of `apple_bundle` or `apple_test` rule
def _get_bundle_target_name(ctx: "context"):
    # `apple_resource_bundle` rules are proxies for the real rules, so make
    # sure we return the real target name rather than the proxy one.
    is_proxy = hasattr(ctx.attrs, "_bundle_target_name")
    return ctx.attrs._bundle_target_name if is_proxy else ctx.attrs.name
def get_product_name(ctx: "context") -> str.type:
    # An explicitly set `product_name` wins; otherwise fall back to the
    # bundle target name.
    product_name = getattr(ctx.attrs, "product_name", None)
    if product_name != None:
        return product_name
    return _get_bundle_target_name(ctx)
# Returns the bundle's `extension` attribute (e.g. "app", "framework").
def get_extension_attr(ctx: "context") -> "":
    return ctx.attrs.extension
# Derives the effective deployment target for the bundle. It's
# usually the deployment target of the binary if present,
# otherwise it falls back to other values (see implementation).
def get_bundle_min_target_version(ctx: "context") -> str.type:
    binary_min_version = None

    # Could be not set for e.g. watchOS bundles which have a stub
    # binary that comes from the apple_toolchain(), not from the
    # apple_bundle() itself (i.e., binary field will be None).
    #
    # TODO(T114147746): The top-level stub bundle for a watchOS app
    # does not have the ability to set its deployment target via
    # a binary (as that field is empty). If it contains asset
    # catalogs (can it?), we need to use correct target version.
    #
    # The solution might to be support SDK version from
    # Info.plist (T110378109).
    if ctx.attrs.binary != None:
        min_version_info = ctx.attrs.binary[AppleMinDeploymentVersionInfo]
        if min_version_info != None:
            binary_min_version = min_version_info.version

    # Fall back to the version propagated through the target graph.
    fallback_min_version = get_min_deployment_version_for_node(ctx)
    min_version = binary_min_version or fallback_min_version

    if min_version != None:
        return min_version

    # TODO(T110378109): support default value from SDK `Info.plist`
    fail("Could not determine min target sdk version for bundle: {}".format(ctx.label))
def get_bundle_resource_processing_options(ctx: "context") -> AppleResourceProcessingOptions.type:
    # A per-target override takes precedence over the toolchain-wide setting.
    is_local = value_or(
        ctx.attrs._compile_resources_locally_override,
        ctx.attrs._apple_toolchain[AppleToolchainInfo].compile_resources_locally,
    )

    # When running locally, also allow uploading the results to the cache.
    return AppleResourceProcessingOptions(prefer_local = is_local, allow_cache_upload = is_local)
def get_bundle_infos_from_graph(graph: ResourceGraph.type) -> [AppleBundleLinkerMapInfo.type]:
    # Walk every node's resource spec and collect linker map info from any
    # dependency-typed resource file that provides it.
    infos = []
    for node in graph.nodes.traverse():
        spec = node.resource_spec
        if not spec:
            continue
        for file in spec.files:
            if type(file) != "dependency":
                continue
            linker_map_info = file.get(AppleBundleLinkerMapInfo)
            if linker_map_info:
                infos.append(linker_map_info)
    return infos
def merge_bundle_linker_maps_info(infos: [AppleBundleLinkerMapInfo.type]) -> AppleBundleLinkerMapInfo.type:
    # Concatenate every incoming info's linker maps into a single provider.
    all_linker_maps = []
    for info in infos:
        all_linker_maps.extend(info.linker_maps)
    return AppleBundleLinkerMapInfo(linker_maps = all_linker_maps)

View File

@ -0,0 +1,18 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
# Provider which exposes a field from `apple_binary` to `apple_bundle` as it might be used during code signing.
AppleEntitlementsInfo = provider(fields = [
    # Optional "artifact" pointing at the entitlements file, if the rule declares one.
    "entitlements_file",
])

# Code-signing strategy for a bundle. Exact selection logic lives in the
# signing code that consumes this enum (not visible here).
CodeSignType = enum(
    "skip",
    "adhoc",
    "distribution",
)

View File

@ -0,0 +1,69 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo")
load(":apple_bundle_utility.bzl", "get_bundle_min_target_version", "get_bundle_resource_processing_options")
load(":apple_core_data_types.bzl", "AppleCoreDataSpec")
load(":apple_sdk.bzl", "get_apple_sdk_name")
load(":resource_groups.bzl", "create_resource_graph")
def apple_core_data_impl(ctx: "context") -> ["provider"]:
    # Wrap the rule's data-model path in a spec and publish it on the
    # resource graph so consuming bundles can compile it.
    core_data_spec = AppleCoreDataSpec(path = ctx.attrs.path)
    resource_graph = create_resource_graph(
        ctx = ctx,
        labels = ctx.attrs.labels,
        deps = [],
        exported_deps = [],
        core_data_spec = core_data_spec,
    )
    return [DefaultInfo(), resource_graph]
def compile_apple_core_data(ctx: "context", specs: [AppleCoreDataSpec.type], product_name: str.type) -> ["artifact", None]:
    # Compiles all Core Data models via `momc` into one output directory.
    # Returns `None` when there is nothing to compile.
    if len(specs) == 0:
        return None

    output = ctx.actions.declare_output("AppleCoreDataCompiled")

    # Aggregate all the coredata momc commands together
    momc_commands = []
    for spec in specs:
        momc_command = _get_momc_command(ctx, spec, product_name, cmd_args("$TMPDIR"))
        momc_commands.append(momc_command)

    # Sandboxing and fs isolation on RE machines results in Xcode tools failing
    # when those are working in freshly created directories in buck-out.
    # See https://fb.workplace.com/groups/1042353022615812/permalink/1872164996301273/
    # As a workaround create a directory in tmp, use it for Xcode tools, then
    # copy the result to buck-out.
    wrapper_script, _ = ctx.actions.write(
        "momc_wrapper.sh",
        [
            cmd_args('export TMPDIR="$(mktemp -d)"'),
            cmd_args(momc_commands),
            cmd_args(output, format = 'mkdir -p {} && cp -r "$TMPDIR"/ {}'),
        ],
        allow_args = True,
    )

    # The momc commands and the output only appear inside the wrapper script,
    # so they must be declared as hidden inputs/outputs.
    combined_command = cmd_args(["/bin/sh", wrapper_script]).hidden(momc_commands + [output.as_output()])
    processing_options = get_bundle_resource_processing_options(ctx)
    ctx.actions.run(combined_command, prefer_local = processing_options.prefer_local, allow_cache_upload = processing_options.allow_cache_upload, category = "apple_core_data")
    return output
def _get_momc_command(ctx: "context", core_data_spec: AppleCoreDataSpec.type, product_name: str.type, output_directory: "cmd_args") -> "cmd_args":
    # Build a single `momc` invocation compiling one data model into `output_directory`.
    toolchain = ctx.attrs._apple_toolchain[AppleToolchainInfo]
    deployment_target_flag = "--{}-deployment-target".format(get_apple_sdk_name(ctx))
    return cmd_args([
        toolchain.momc,
        "--sdkroot",
        toolchain.sdk_path,
        deployment_target_flag,
        get_bundle_min_target_version(ctx),
        "--module",
        product_name,
        core_data_spec.path,
        output_directory,
    ], delimiter = " ")

View File

@ -0,0 +1,10 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
# Describes a single Core Data model to be compiled by `momc`.
AppleCoreDataSpec = record(
    # "artifact" pointing at the data model.
    path = field("artifact"),
)

View File

@ -0,0 +1,63 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo")
# Subtarget names exposed for debug-info-related outputs.
DSYM_SUBTARGET = "dsym"
DSYM_INFO_SUBTARGET = "dsym-info"
DWARF_AND_DSYM_SUBTARGET = "dwarf-and-dsym"
DEBUGINFO_SUBTARGET = "debuginfo"

AppleDebuggableInfo = provider(fields = [
    # ["artifact"] — dSYM bundles produced for this target.
    "dsyms",
    # "transitive_set" — external debug info (e.g. object files referenced by
    # Mach-O binaries) needed to build dSYMs.
    "external_debug_info",
])

# Debug info for a bundle: the binary's own info plus that of its deps.
AppleBundleDebuggableInfo = record(
    # Can be `None` for WatchKit stub
    binary_info = field([AppleDebuggableInfo.type, None]),
    # Debuggable infos of the bundle's dependencies.
    dep_infos = field([AppleDebuggableInfo.type]),
    # Concat of `binary_info` and `dep_infos`
    all_infos = field([AppleDebuggableInfo.type]),
)
# TODO(T110672942): Things which are still unsupported:
# - pass in dsymutil_extra_flags
# - oso_prefix
# - dsym_verification
# Runs `dsymutil` over `executable` to produce a dSYM bundle artifact.
def get_apple_dsym(ctx: "context", executable: "artifact", external_debug_info: ["_arglike"], action_identifier: "string", output_path_override: ["string", None] = None) -> "artifact":
    dsymutil = ctx.attrs._apple_toolchain[AppleToolchainInfo].dsymutil
    output_path = output_path_override or "{}.dSYM".format(executable.short_path)
    output = ctx.actions.declare_output(output_path, dir = True)

    cmd = cmd_args([dsymutil, "-o", output.as_output(), executable])

    # Mach-O executables don't contain DWARF data.
    # Instead, they contain paths to the object files which themselves contain DWARF data.
    #
    # So, those object files are needed for dsymutil to be to create the dSYM bundle.
    cmd.hidden(external_debug_info)
    ctx.actions.run(cmd, category = "apple_dsym", identifier = action_identifier)
    return output
def get_apple_dsym_info(ctx: "context", binary_dsyms: ["artifact"], dep_dsyms: ["artifact"]) -> "artifact":
    # Produces a JSON manifest mapping the binary dSYM and dep dSYMs.
    dsym_info = {}

    # WatchOS stub does not have a dSYM, so it's possible that we get zero `binary_dsyms`
    if len(binary_dsyms) > 1:
        fail("There cannot be more than one binary dSYM")
    elif len(binary_dsyms) == 1:
        dsym_info["binary"] = binary_dsyms[0]

    if dep_dsyms:
        # `dedupe` needed as it's possible for the same dSYM to bubble up
        # through multiple paths in a graph (e.g., including both a binary
        # + bundle in the `deps` field of a parent bundle).
        dsym_info["deps"] = dedupe(dep_dsyms)

    return ctx.actions.write_json("dsym-info.json", dsym_info)

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,168 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//:paths.bzl", "paths")
load(
"@prelude//linking:link_info.bzl",
"FrameworksLinkable",
"LinkArgs",
"LinkInfo",
"LinkInfos",
"LinkInfosTSet",
"LinkableType",
"get_link_args",
"merge_framework_linkables",
)
load("@prelude//utils:utils.bzl", "expect")
load(":apple_framework_versions.bzl", "get_framework_linker_args")
load(":apple_toolchain_types.bzl", "AppleToolchainInfo")
# Framework search paths the compiler/linker already provide via the SDK root
# (`isysroot`); paths under these must not be passed again with `-F`.
_IMPLICIT_SDKROOT_FRAMEWORK_SEARCH_PATHS = [
    "$SDKROOT/Library/Frameworks",
    "$SDKROOT/System/Library/Frameworks",
]
def create_frameworks_linkable(ctx: "context") -> [FrameworksLinkable.type, None]:
    # Nothing to link against when the rule declares neither libraries nor frameworks.
    libraries = ctx.attrs.libraries
    frameworks = ctx.attrs.frameworks
    if not libraries and not frameworks:
        return None

    return FrameworksLinkable(
        library_names = [_library_name(library) for library in libraries],
        unresolved_framework_paths = _get_non_sdk_unresolved_framework_directories(frameworks),
        framework_names = [to_framework_name(framework) for framework in frameworks],
    )
def _get_apple_frameworks_linker_flags(ctx: "context", linkable: [FrameworksLinkable.type, None]) -> "cmd_args":
    # No linkable, no flags.
    if not linkable:
        return cmd_args()

    # `-F` search paths first, then `-framework` args, then `-l` libraries.
    search_paths = _expand_sdk_framework_paths(ctx, linkable.unresolved_framework_paths)
    flags = _get_framework_search_path_flags(search_paths)
    flags.add(get_framework_linker_args(ctx, linkable.framework_names))
    flags.add(["-l" + library_name for library_name in linkable.library_names])
    return flags
def get_framework_search_path_flags(ctx: "context") -> "cmd_args":
    # Expand the non-SDK framework directories and emit them as `-F` flags.
    unresolved_dirs = _get_non_sdk_unresolved_framework_directories(ctx.attrs.frameworks)
    return _get_framework_search_path_flags(_expand_sdk_framework_paths(ctx, unresolved_dirs))
def _get_framework_search_path_flags(frameworks: ["cmd_args"]) -> "cmd_args":
    # Each directory becomes a separate `-F <dir>` pair.
    search_path_flags = cmd_args()
    for framework_dir in frameworks:
        search_path_flags.add(["-F", framework_dir])
    return search_path_flags
def _get_non_sdk_unresolved_framework_directories(frameworks: [""]) -> [""]:
    # We don't want to include SDK directories as those are already added via `isysroot` flag in toolchain definition.
    # Adding those directly via `-F` will break building Catalyst applications as frameworks from support directory
    # won't be found and those for macOS platform will be used.
    directories = []
    for framework in frameworks:
        directory = _non_sdk_unresolved_framework_directory(framework)
        if directory:
            directories.append(directory)
    return dedupe(directories)
def to_framework_name(framework_path: str.type) -> str.type:
    # "Path/To/Foo.framework" -> "Foo"
    basename = paths.basename(framework_path)
    name, ext = paths.split_extension(basename)
    expect(ext == ".framework", "framework `{}` missing `.framework` suffix", framework_path)
    return name
def _library_name(library: str.type) -> str.type:
    # "path/to/libfoo.dylib" -> "foo"; BUCK targets are rejected outright.
    if ":" in library:
        fail("Invalid library: {}. Use the field 'linker_flags' with $(location ) macro if you want to pass in a BUCK target for libraries.".format(library))
    basename = paths.basename(library)
    if not basename.startswith("lib"):
        fail("unexpected library: {}".format(library))
    stem = basename[3:]
    return paths.split_extension(stem)[0]
def _expand_sdk_framework_paths(ctx: "context", unresolved_framework_paths: [str.type]) -> ["cmd_args"]:
    # Expand each symbolic path ($SDKROOT/..., $PLATFORM_DIR/...) individually.
    expanded = []
    for unresolved_path in unresolved_framework_paths:
        expanded.append(_expand_sdk_framework_path(ctx, unresolved_path))
    return expanded
def _expand_sdk_framework_path(ctx: "context", framework_path: str.type) -> "cmd_args":
    # Expands a leading `$PLATFORM_DIR/` or `$SDKROOT/` prefix in
    # `framework_path` into the concrete toolchain path; paths with no `$`
    # prefix pass through unchanged, any other `$` usage fails.
    apple_toolchain_info = ctx.attrs._apple_toolchain[AppleToolchainInfo]
    path_expansion_map = {
        "$PLATFORM_DIR/": apple_toolchain_info.platform_path,
        "$SDKROOT/": apple_toolchain_info.sdk_path,
    }

    for (trailing_path_variable, path_value) in path_expansion_map.items():
        (before, separator, relative_path) = framework_path.partition(trailing_path_variable)
        if separator == trailing_path_variable:
            # The variable must be the very first thing in the path...
            if len(before) > 0:
                fail("Framework symbolic path not anchored at the beginning, tried expanding `{}`".format(framework_path))

            # ...and the remainder must be a plain relative path with no
            # further symbolic variables.
            if relative_path.count("$") > 0:
                fail("Framework path contains multiple symbolic paths, tried expanding `{}`".format(framework_path))
            if len(relative_path) == 0:
                fail("Framework symbolic path contains no relative path to expand, tried expanding `{}`, relative path: `{}`, before: `{}`, separator `{}`".format(framework_path, relative_path, before, separator))

            return cmd_args([path_value, relative_path], delimiter = "/")

    # A leading `$` that matched no known variable is an error.
    if framework_path.find("$") == 0:
        fail("Failed to expand framework path: {}".format(framework_path))

    return cmd_args(framework_path)
def _non_sdk_unresolved_framework_directory(framework_path: str.type) -> [str.type, None]:
    # We must only drop any framework paths that are part of the implicit
    # framework search paths in the linker + compiler, all other paths
    # must be expanded and included as part of the command.
    #
    # Use `startswith` for the prefix test instead of `find(...) == 0`, which
    # scans the whole string and obscures the intent.
    for implicit_search_path in _IMPLICIT_SDKROOT_FRAMEWORK_SEARCH_PATHS:
        if framework_path.startswith(implicit_search_path):
            return None
    return paths.dirname(framework_path)
def build_link_args_with_deduped_framework_flags(
        ctx: "context",
        info: "MergedLinkInfo",
        frameworks_linkable: ["FrameworksLinkable", None],
        link_style: "LinkStyle",
        prefer_stripped: bool.type = False) -> LinkArgs.type:
    # Combines the transitive framework linkables for `link_style` with the
    # current target's own linkable, deduping them into a single LinkInfo.
    frameworks_link_info = _link_info_from_frameworks_linkable(ctx, [info.frameworks[link_style], frameworks_linkable])

    # No framework flags needed — fall back to plain link args.
    if not frameworks_link_info:
        return get_link_args(info, link_style, prefer_stripped)

    # Prepend the deduped framework flags as a new tset node on top of the
    # existing link infos for this style.
    return LinkArgs(
        tset = (ctx.actions.tset(
            LinkInfosTSet,
            value = LinkInfos(default = frameworks_link_info, stripped = frameworks_link_info),
            children = [info._infos[link_style]],
        ), prefer_stripped),
    )
def get_frameworks_link_info_by_deduping_link_infos(
        ctx: "context",
        infos: [[LinkInfo.type, None]],
        framework_linkable: [FrameworksLinkable.type, None]) -> [LinkInfo.type, None]:
    # When building a framework or executable, all frameworks used by the
    # statically-linked deps in the subtree need to be linked.
    #
    # Without deduping, we've seen the linking step fail because the argsfile
    # exceeds the acceptable size by the linker.
    linkables = _extract_framework_linkables(infos)
    if framework_linkable:
        linkables = linkables + [framework_linkable]
    return _link_info_from_frameworks_linkable(ctx, linkables)
def _extract_framework_linkables(link_infos: [[LinkInfo.type, None]]) -> [FrameworksLinkable.type]:
    # Collects only the frameworks-typed linkables from the given link infos.
    #
    # The annotation previously read `[[LinkInfo.type], None]` (an optional
    # list of LinkInfo *lists*), which did not match the caller's
    # `[[LinkInfo.type, None]]` (a list of *optional* LinkInfos); None
    # entries are now also skipped instead of crashing on `.linkables`.
    frameworks_type = LinkableType("frameworks")
    linkables = []
    for info in link_infos:
        if info == None:
            continue
        for linkable in info.linkables:
            if linkable._type == frameworks_type:
                linkables.append(linkable)
    return linkables
def _link_info_from_frameworks_linkable(ctx: "context", framework_linkables: [[FrameworksLinkable.type, None]]) -> [LinkInfo.type, None]:
    # Merge all linkables into one set of linker flags; `None` when there is
    # nothing to link against.
    flags = _get_apple_frameworks_linker_flags(ctx, merge_framework_linkables(framework_linkables))
    if flags:
        return LinkInfo(pre_flags = [flags])
    return None

View File

@ -0,0 +1,139 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load(":apple_bundle_destination.bzl", "AppleBundleDestination")
load(":apple_bundle_part.bzl", "AppleBundlePart")
load(":apple_bundle_utility.bzl", "get_bundle_min_target_version", "get_product_name")
load(":apple_sdk.bzl", "get_apple_sdk_name")
load(
":apple_sdk_metadata.bzl",
"AppleSdkMetadata", # @unused Used as a type
"MacOSXCatalystSdkMetadata",
"MacOSXSdkMetadata",
"WatchOSSdkMetadata",
"WatchSimulatorSdkMetadata",
"get_apple_sdk_metadata_for_sdk_name",
)
load(":apple_toolchain_types.bzl", "AppleToolchainInfo", "AppleToolsInfo")
def process_info_plist(ctx: "context", override_input: ["artifact", None]) -> AppleBundlePart.type:
    # Preprocess the raw Info.plist, then apply additional/override keys and
    # the optional override input, producing the final bundle Info.plist part.
    preprocessed = _preprocess_info_plist(ctx)
    final_plist = ctx.actions.declare_output("Info.plist")
    process_plist(
        ctx = ctx,
        input = preprocessed,
        output = final_plist.as_output(),
        override_input = override_input,
        additional_keys = _additional_keys_as_json_file(ctx),
        override_keys = _override_keys_as_json_file(ctx),
    )
    return AppleBundlePart(source = final_plist, destination = AppleBundleDestination("metadata"))
def _get_plist_run_options() -> {str.type: bool.type}:
    # plist generation is cheap, fast, and deterministic: prefer running it
    # locally (RE network overhead isn't worth it) while still uploading the
    # cacheable result.
    return {
        "allow_cache_upload": True,
        "prefer_local": True,
    }
def _preprocess_info_plist(ctx: "context") -> "artifact":
    """Runs the Info.plist preprocessor over the source plist, expanding the
    product name and (when present) the `info_plist_substitutions` values."""
    source = ctx.attrs.info_plist
    preprocessed = ctx.actions.declare_output("PreprocessedInfo.plist")
    processor = ctx.attrs._apple_tools[AppleToolsInfo].info_plist_processor
    args = cmd_args([
        processor,
        "preprocess",
        "--input",
        source,
        "--output",
        preprocessed.as_output(),
        "--product-name",
        get_product_name(ctx),
    ])
    substitutions_json = _plist_substitutions_as_json_file(ctx)
    if substitutions_json != None:
        args.add(["--substitutions-json", substitutions_json])
    ctx.actions.run(args, category = "apple_preprocess_info_plist", **_get_plist_run_options())
    return preprocessed
def _plist_substitutions_as_json_file(ctx: "context") -> ["artifact", None]:
    # Serialize `info_plist_substitutions` to a JSON artifact, or return None
    # when the attribute is empty/unset.
    substitutions = ctx.attrs.info_plist_substitutions
    if not substitutions:
        return None
    return ctx.actions.write_json("plist_substitutions.json", substitutions)
def process_plist(ctx: "context", input: "artifact", output: "output_artifact", override_input: ["artifact", None] = None, additional_keys: ["artifact", None] = None, override_keys: ["artifact", None] = None, action_id: [str.type, None] = None):
    """
    Runs the plist processor over `input`, writing the merged result to
    `output`. The three optional JSON inputs are appended as flag/value pairs
    only when provided.
    """
    processor = ctx.attrs._apple_tools[AppleToolsInfo].info_plist_processor
    args = cmd_args([processor, "process", "--input", input, "--output", output])
    for flag, artifact in [
        ("--override-input", override_input),
        ("--additional-keys", additional_keys),
        ("--override-keys", override_keys),
    ]:
        if artifact != None:
            args.add([flag, artifact])
    ctx.actions.run(args, category = "apple_process_info_plist", identifier = action_id or input.basename, **_get_plist_run_options())
def _additional_keys_as_json_file(ctx: "context") -> "artifact":
    # Serialize the computed SDK/toolchain Info.plist keys to JSON for the processor.
    return ctx.actions.write_json("plist_additional.json", _info_plist_additional_keys(ctx))
def _info_plist_additional_keys(ctx: "context") -> {str.type: ""}:
    """
    Computes Info.plist keys describing the SDK and toolchain used for the
    build (the `DT*` keys, supported platforms and the minimum-version key),
    plus macOS-specific extras.

    Each toolchain-derived key is only emitted when the corresponding
    toolchain value is set.
    """
    sdk_name = get_apple_sdk_name(ctx)
    sdk_metadata = get_apple_sdk_metadata_for_sdk_name(sdk_name)

    # Fetch the toolchain provider once instead of once per key.
    toolchain = ctx.attrs._apple_toolchain[AppleToolchainInfo]

    result = _extra_mac_info_plist_keys(sdk_metadata, ctx.attrs.extension)
    result["CFBundleSupportedPlatforms"] = sdk_metadata.info_plist_supported_platforms_values
    result["DTPlatformName"] = sdk_name
    if toolchain.sdk_version:
        result["DTPlatformVersion"] = toolchain.sdk_version
        # DTSDKName is the SDK name with its version appended.
        result["DTSDKName"] = sdk_name + toolchain.sdk_version
    if toolchain.sdk_build_version:
        result["DTPlatformBuild"] = toolchain.sdk_build_version
        result["DTSDKBuild"] = toolchain.sdk_build_version
    if toolchain.xcode_build_version:
        result["DTXcodeBuild"] = toolchain.xcode_build_version
    if toolchain.xcode_version:
        result["DTXcode"] = toolchain.xcode_version
    result[sdk_metadata.min_version_plist_info_key] = get_bundle_min_target_version(ctx)
    return result
def _extra_mac_info_plist_keys(sdk_metadata: AppleSdkMetadata.type, extension: str.type) -> {str.type: ""}:
    # Extra keys are only added for macOS XPC services.
    if sdk_metadata.name != MacOSXSdkMetadata.name or extension != "xpc":
        return {}
    return {
        "NSHighResolutionCapable": True,
        "NSSupportsAutomaticGraphicsSwitching": True,
    }
def _override_keys_as_json_file(ctx: "context") -> "artifact":
    # Serialize the override keys to JSON for the plist processor.
    return ctx.actions.write_json("plist_override.json", _info_plist_override_keys(ctx))
def _info_plist_override_keys(ctx: "context") -> {str.type: ""}:
    """Keys whose values replace whatever the source Info.plist contains."""
    sdk_name = get_apple_sdk_name(ctx)
    skipped_sdks = [WatchOSSdkMetadata.name, WatchSimulatorSdkMetadata.name, MacOSXCatalystSdkMetadata.name]
    result = {}
    if sdk_name == MacOSXSdkMetadata.name:
        # The key is not set at all for macOS XPC services.
        if ctx.attrs.extension != "xpc":
            result["LSRequiresIPhoneOS"] = False
    elif sdk_name not in skipped_sdks:
        result["LSRequiresIPhoneOS"] = True
    return result

View File

@ -0,0 +1,60 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
# `apple_bundle.info_plist_substitutions` might contain `CODE_SIGN_ENTITLEMENTS` key which (as per v1 documentation):
#
# > Code signing will embed entitlements pointed to by the entitlements_file arg in the bundle's apple_binary.
# > This is the preferred way to specify entitlements when building with Buck.
# > If the entitlements file is not present, it falls back to the CODE_SIGN_ENTITLEMENTS entry in info_plist_substitutions.
#
# In order to properly depend on this fallback entitlements file (and manipulate it) we have to convert this text entry into the source artifact.
# We only can do that on macro layer, hence the purpose of the following code.
_SOURCE_ROOT_PREFIX = "$(SOURCE_ROOT)/"
_CODE_SIGN_ENTITLEMENTS_KEY = "CODE_SIGN_ENTITLEMENTS"
def _find_first_variable(string: str.type) -> [(str.type, (str.type, str.type)), None]:
    """
    If a variable like `$(FOO)` is not found in `string` returns `None`, else returns a
    tuple with the first element equal to the variable name (e.g. `FOO`) and the second
    element equal to a tuple of the parts before and after this variable.

    Fails when an opening `$(` has no matching `)`.
    """
    expansion_start = "$("
    expansion_end = ")"
    variable_start = string.find(expansion_start)
    if variable_start == -1:
        return None
    variable_end = string.find(expansion_end, variable_start)
    if variable_end == -1:
        fail("Expected variable expansion in string: `{}`".format(string))

    # The variable name sits between the opening `$(` and the closing `)`.
    # (The previous slice end, `variable_end - len(expansion_end) + 1`, was only
    # correct because the terminator happens to be a single character.)
    variable = string[variable_start + len(expansion_start):variable_end]
    prefix = string[:variable_start]
    suffix = string[variable_end + len(expansion_end):]
    return (variable, (prefix, suffix))
def _expand_codesign_entitlements_path(info_plist_substitutions: {str.type: str.type}, path: str.type) -> str.type:
    """
    Repeatedly expands `$(VAR)` references in `path` using
    `info_plist_substitutions`, stripping any leading `$(SOURCE_ROOT)/` prefix
    on each pass, until no variables remain.

    Fails when a variable has no substitution, or when expansion does not
    converge (e.g. substitutions that reference each other in a cycle).
    """
    path = path.strip()

    # Starlark has no `while`; the fixed bound also guards against cyclic
    # substitutions that would never converge.
    for _ in range(100):
        if path.startswith(_SOURCE_ROOT_PREFIX):
            path = path[len(_SOURCE_ROOT_PREFIX):]
        maybe_variable = _find_first_variable(path)
        if not maybe_variable:
            return path
        (key, (prefix, suffix)) = maybe_variable
        maybe_value = info_plist_substitutions.get(key)
        if not maybe_value:
            fail("Expected to find value for `{}` in `info_plist_substitutions` dictionary `{}`".format(key, info_plist_substitutions))
        path = prefix + maybe_value + suffix
    fail("Too many iterations (a substitution loop might be present) while expanding `{}` with substitutions `{}`".format(path, info_plist_substitutions))
def parse_codesign_entitlements(info_plist_substitutions: [{str.type: str.type}, None]) -> [str.type, None]:
    """Returns the fully-expanded CODE_SIGN_ENTITLEMENTS path from the
    substitutions dictionary, or None when it is absent."""
    if not info_plist_substitutions:
        return None
    raw_path = info_plist_substitutions.get(_CODE_SIGN_ENTITLEMENTS_KEY)
    if not raw_path:
        return None
    return _expand_codesign_entitlements_path(info_plist_substitutions, raw_path)

View File

@ -0,0 +1,272 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//apple:apple_dsym.bzl", "AppleDebuggableInfo", "DEBUGINFO_SUBTARGET", "DSYM_SUBTARGET", "get_apple_dsym")
load("@prelude//apple:apple_stripping.bzl", "apple_strip_args")
load("@prelude//apple/swift:swift_compilation.bzl", "compile_swift", "get_swift_anonymous_targets", "get_swift_dependency_info", "get_swift_pcm_uncompile_info", "uses_explicit_modules")
load("@prelude//cxx:cxx_library.bzl", "cxx_library_parameterized")
load("@prelude//cxx:cxx_library_utility.bzl", "cxx_attr_deps", "cxx_attr_exported_deps")
load("@prelude//cxx:cxx_sources.bzl", "get_srcs_with_flags")
load("@prelude//cxx:cxx_types.bzl", "CxxRuleAdditionalParams", "CxxRuleConstructorParams", "CxxRuleProviderParams", "CxxRuleSubTargetParams")
load(
"@prelude//cxx:debug.bzl",
"ExternalDebugInfoTSet", # @unused Used as a type
"project_external_debug_info",
)
load("@prelude//cxx:headers.bzl", "cxx_attr_exported_headers")
load(
"@prelude//cxx:link.bzl",
"CxxLinkerMapData", # @unused Used as a type
)
load(
"@prelude//cxx:linker.bzl",
"SharedLibraryFlagOverrides",
)
load(
"@prelude//cxx:preprocessor.bzl",
"CPreprocessor",
)
load("@prelude//linking:link_info.bzl", "LinkStyle")
load(":apple_bundle_types.bzl", "AppleBundleLinkerMapInfo", "AppleMinDeploymentVersionInfo")
load(":apple_frameworks.bzl", "get_framework_search_path_flags")
load(":apple_modular_utility.bzl", "MODULE_CACHE_PATH")
load(":apple_target_sdk_version.bzl", "get_min_deployment_version_for_node", "get_min_deployment_version_target_linker_flags", "get_min_deployment_version_target_preprocessor_flags")
load(":apple_utility.bzl", "get_apple_cxx_headers_layout", "get_module_name")
load(":modulemap.bzl", "preprocessor_info_for_modulemap")
load(":resource_groups.bzl", "create_resource_graph")
load(":xcode.bzl", "apple_populate_xcode_attributes")
# Knobs that let rules layered on top of `apple_library` (e.g. `apple_test`)
# customize how the underlying C++ library implementation is driven.
AppleLibraryAdditionalParams = record(
    # Name of the top level rule utilizing the apple_library rule.
    rule_type = str.type,
    # Extra flags to be passed to the linker.
    extra_exported_link_flags = field(["_arglike"], []),
    # Extra flags to be passed to the Swift compiler.
    extra_swift_compiler_flags = field(["_arglike"], []),
    # Linker flags that tell the linker to create shared libraries, overriding the default shared library flags.
    # e.g. when building Apple tests, we want to link with `-bundle` instead of `-shared` to allow
    # linking against the bundle loader.
    shared_library_flags = field([SharedLibraryFlagOverrides.type, None], None),
    # Function to use for setting Xcode attributes for the Xcode data sub target.
    populate_xcode_attributes_func = field("function", apple_populate_xcode_attributes),
    # Define which sub targets to generate.
    generate_sub_targets = field(CxxRuleSubTargetParams.type, CxxRuleSubTargetParams()),
    # Define which providers to generate.
    generate_providers = field(CxxRuleProviderParams.type, CxxRuleProviderParams()),
    # Forces link group linking logic, even when there's no mapping. Link group linking
    # without a mapping is equivalent to statically linking the whole transitive dep graph.
    force_link_group_linking = field(bool.type, False),
)
def apple_library_impl(ctx: "context") -> ["promise", ["provider"]]:
    # Implementation of `apple_library`. When Swift explicit modules are in
    # use, provider computation is deferred behind anonymous Swift targets and
    # a promise is returned; otherwise providers are computed immediately.

    # `deps_providers` carries the resolved providers of the anonymous Swift
    # targets (empty when explicit modules are disabled).
    def get_apple_library_providers(deps_providers) -> ["provider"]:
        constructor_params = apple_library_rule_constructor_params_and_swift_providers(
            ctx,
            AppleLibraryAdditionalParams(
                rule_type = "apple_library",
                generate_providers = CxxRuleProviderParams(
                    java_packaging_info = False,
                    android_packageable_info = False,
                    omnibus_root = False,
                ),
            ),
            deps_providers,
        )
        # Resource graph node for this library so bundles can collect its resources.
        resource_graph = create_resource_graph(
            ctx = ctx,
            labels = ctx.attrs.labels,
            deps = cxx_attr_deps(ctx),
            exported_deps = cxx_attr_exported_deps(ctx),
        )
        output = cxx_library_parameterized(ctx, constructor_params)
        return output.providers + [resource_graph]

    if uses_explicit_modules(ctx):
        return get_swift_anonymous_targets(ctx, get_apple_library_providers)
    else:
        return get_apple_library_providers([])
def apple_library_rule_constructor_params_and_swift_providers(ctx: "context", params: AppleLibraryAdditionalParams.type, deps_providers: list.type = []) -> CxxRuleConstructorParams.type:
    """
    Builds the `CxxRuleConstructorParams` that drive the shared C++ library
    implementation for an Apple library: splits Swift from C/C++/ObjC sources,
    compiles the Swift part, and wires modulemaps, framework search paths,
    linker inputs and Xcode metadata into the cxx rule machinery.

    `params` customizes behavior for rules layered on top of `apple_library`
    (e.g. `apple_test`); `deps_providers` carries resolved providers from the
    anonymous Swift targets when explicit modules are enabled.
    """
    cxx_srcs, swift_srcs = _filter_swift_srcs(ctx)

    # First create a modulemap if necessary. This is required for importing
    # ObjC code in Swift so must be done before Swift compilation.
    exported_hdrs = cxx_attr_exported_headers(ctx, get_apple_cxx_headers_layout(ctx))
    if (ctx.attrs.modular or swift_srcs) and exported_hdrs:
        modulemap_pre = preprocessor_info_for_modulemap(ctx, "exported", exported_hdrs, None)
    else:
        modulemap_pre = None

    # `swift_compile` is None when there are no Swift sources to compile.
    swift_compile = compile_swift(ctx, swift_srcs, deps_providers, exported_hdrs, modulemap_pre, params.extra_swift_compiler_flags)
    swift_object_files = [swift_compile.object_file] if swift_compile else []

    swift_pre = CPreprocessor()
    if swift_compile:
        # If we have Swift we export the extended modulemap that includes
        # the ObjC exported headers and the -Swift.h header.
        exported_pre = swift_compile.exported_pre

        # We also include the -Swift.h header to this libraries preprocessor
        # info, so that we can import it unprefixed in this module.
        swift_pre = swift_compile.pre
    elif modulemap_pre:
        # Otherwise if this library is modular we export a modulemap of
        # the ObjC exported headers.
        exported_pre = modulemap_pre
    else:
        exported_pre = None

    swift_providers = swift_compile.providers if swift_compile else [get_swift_dependency_info(ctx, exported_pre, None)]
    swift_argsfile = swift_compile.swift_argsfile if swift_compile else None

    # Clang-module flags applied to this library's own compilation.
    modular_pre = CPreprocessor(
        uses_modules = ctx.attrs.uses_modules,
        modular_args = [
            "-fcxx-modules",
            "-fmodules",
            "-fmodule-name=" + get_module_name(ctx),
            "-fmodules-cache-path=" + MODULE_CACHE_PATH,
            # TODO(T123756899): We have to use this hack to make compilation work
            # when Clang modules are enabled and using toolchains. That's because
            # resource-dir is passed as a relative path (so that no abs paths appear
            # in any .pcm). The compiler will then expand and generate #include paths
            # that won't work unless we have the directive below.
            "-I.",
        ],
    )

    def additional_providers_factory(propagated_exported_preprocessor_info: ["CPreprocessorInfo", None]) -> ["provider"]:
        # Expose `SwiftPCMUncompiledInfo` which represents the ObjC part of a target,
        # if a target also has a Swift part, the provider will expose the generated `-Swift.h` header.
        # This is used for Swift Explicit Modules, and allows compiling a PCM file out of the exported headers.
        swift_pcm_uncompile_info = get_swift_pcm_uncompile_info(
            ctx,
            propagated_exported_preprocessor_info,
            exported_pre,
        )
        providers = [swift_pcm_uncompile_info] if swift_pcm_uncompile_info else []
        return providers + swift_providers

    framework_search_path_pre = CPreprocessor(
        args = [get_framework_search_path_flags(ctx)],
    )
    return CxxRuleConstructorParams(
        rule_type = params.rule_type,
        is_test = (params.rule_type == "apple_test"),
        headers_layout = get_apple_cxx_headers_layout(ctx),
        extra_exported_link_flags = params.extra_exported_link_flags,
        extra_link_flags = [_get_linker_flags(ctx, swift_providers)],
        extra_link_input = swift_object_files,
        extra_link_input_has_external_debug_info = True,
        extra_preprocessors = get_min_deployment_version_target_preprocessor_flags(ctx) + [swift_pre, modular_pre],
        extra_exported_preprocessors = filter(None, [framework_search_path_pre, exported_pre]),
        srcs = cxx_srcs,
        additional = CxxRuleAdditionalParams(
            srcs = swift_srcs,
            argsfiles = [swift_argsfile] if swift_argsfile else [],
            # We need to add any swift modules that we include in the link, as
            # these will end up as `N_AST` entries that `dsymutil` will need to
            # follow.
            external_debug_info = _get_external_debug_info(swift_providers),
            subtargets = {
                "swift-compile": [DefaultInfo(default_output = swift_compile.object_file if swift_compile else None)],
            },
            additional_providers_factory = additional_providers_factory,
        ),
        link_style_sub_targets_and_providers_factory = _get_shared_link_style_sub_targets_and_providers,
        shared_library_flags = params.shared_library_flags,
        # apple_library's 'stripped' arg only applies to shared subtargets, or,
        # targets with 'preferred_linkage = "shared"'
        strip_executable = ctx.attrs.stripped,
        strip_args_factory = apple_strip_args,
        force_link_group_linking = params.force_link_group_linking,
        cxx_populate_xcode_attributes_func = lambda local_ctx, **kwargs: _xcode_populate_attributes(ctx = local_ctx, swift_argsfile = swift_argsfile, populate_xcode_attributes_func = params.populate_xcode_attributes_func, **kwargs),
        generate_sub_targets = params.generate_sub_targets,
        generate_providers = params.generate_providers,
        # Some apple rules rely on `static` libs *not* following dependents.
        link_groups_force_static_follows_dependents = False,
    )
def _filter_swift_srcs(ctx: "context") -> (["CxxSrcWithFlags"], ["CxxSrcWithFlags"]):
    # Partition the target's sources by file extension: (non-Swift, Swift).
    srcs = get_srcs_with_flags(ctx)
    swift_srcs = [s for s in srcs if s.file.extension == ".swift"]
    other_srcs = [s for s in srcs if s.file.extension != ".swift"]
    return other_srcs, swift_srcs
def _get_shared_link_style_sub_targets_and_providers(
        link_style: LinkStyle.type,
        ctx: "context",
        executable: "artifact",
        external_debug_info: ["transitive_set", None],
        _dwp: ["artifact", None],
        _pdb: ["artifact", None],
        linker_map: [CxxLinkerMapData.type, None]) -> ({str.type: ["provider"]}, ["provider"]):
    # Produces the dSYM/debug-info/linker-map sub-targets and the Apple debug
    # providers for the shared link style; all other link styles get nothing.
    if link_style != LinkStyle("shared"):
        return ({}, [])
    min_version = get_min_deployment_version_for_node(ctx)
    min_version_providers = [AppleMinDeploymentVersionInfo(version = min_version)] if min_version != None else []
    external_debug_info_args = project_external_debug_info(
        actions = ctx.actions,
        label = ctx.label,
        infos = [external_debug_info],
    )
    # Generate a dSYM bundle from the linked shared library and its debug info.
    dsym_artifact = get_apple_dsym(
        ctx = ctx,
        executable = executable,
        external_debug_info = external_debug_info_args,
        action_identifier = executable.short_path,
    )
    subtargets = {
        DSYM_SUBTARGET: [DefaultInfo(default_output = dsym_artifact)],
        DEBUGINFO_SUBTARGET: [DefaultInfo(other_outputs = external_debug_info_args)],
    }
    providers = [
        AppleDebuggableInfo(dsyms = [dsym_artifact], external_debug_info = external_debug_info),
    ] + min_version_providers
    # The linker map is optional; only expose it when the link produced one.
    if linker_map != None:
        subtargets["linker-map"] = [DefaultInfo(default_output = linker_map.map, other_outputs = [linker_map.binary])]
        providers += [AppleBundleLinkerMapInfo(linker_maps = [linker_map.map])]
    return (subtargets, providers)
def _get_external_debug_info(swift_providers: ["provider"]) -> [ExternalDebugInfoTSet.type]:
    # Collect the non-None external debug info tsets exposed by the Swift providers.
    return [
        p.external_debug_info
        for p in swift_providers
        if hasattr(p, "external_debug_info") and p.external_debug_info != None
    ]
def _get_linker_flags(ctx: "context", swift_providers: ["provider"]) -> "cmd_args":
    # Start from the deployment-target linker flags, then append the
    # transitive swiftmodule paths exposed by the Swift providers.
    flags = cmd_args(get_min_deployment_version_target_linker_flags(ctx))
    for provider in swift_providers:
        if hasattr(provider, "transitive_swiftmodule_paths"):
            flags.add(provider.transitive_swiftmodule_paths.project_as_args("linker_args"))
    return flags
def _xcode_populate_attributes(
        ctx,
        srcs: ["CxxSrcWithFlags"],
        argsfiles_by_ext: {str.type: "artifact"},
        swift_argsfile: ["CxxAdditionalArgsfileParams", None],
        populate_xcode_attributes_func: "function",
        **_kwargs) -> {str.type: ""}:
    # Fold the Swift argsfile (when present) into the per-extension argsfile
    # map before delegating to the rule-specific population function.
    if swift_argsfile:
        argsfiles_by_ext[swift_argsfile.extension] = swift_argsfile.file
    return populate_xcode_attributes_func(ctx, srcs = srcs, argsfiles_by_ext = argsfiles_by_ext, product_name = ctx.attrs.name)

View File

@ -0,0 +1,49 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load(":apple_package_config.bzl", "apple_package_config")
load(
":apple_rules_impl_utility.bzl",
"APPLE_ARCHIVE_OBJECTS_LOCALLY_OVERRIDE_ATTR_NAME",
"APPLE_LINK_LIBRARIES_LOCALLY_OVERRIDE_ATTR_NAME",
)
# Maps attribute names to the (buckconfig section, key) pair that can override
# local-execution behavior for `apple_library` targets.
_APPLE_LIBRARY_LOCAL_EXECUTION_OVERRIDES = {
    APPLE_LINK_LIBRARIES_LOCALLY_OVERRIDE_ATTR_NAME: ("apple", "link_libraries_locally_override"),
    APPLE_ARCHIVE_OBJECTS_LOCALLY_OVERRIDE_ATTR_NAME: ("apple", "archive_objects_locally_override"),
}

# Same mapping, for `apple_binary` targets.
_APPLE_BINARY_LOCAL_EXECUTION_OVERRIDES = {
    "link_locally_override": ("apple", "link_binaries_locally_override"),
}
def apple_macro_layer_set_bool_override_attrs_from_config(attrib_map: {str.type: (str.type, str.type)}) -> {str.type: "selector"}:
    """
    For each attribute in `attrib_map`, reads the associated buckconfig entry
    and, when it is set, produces a select() that applies the boolean value by
    default but leaves host-tool (execution-platform-transitioned) builds unset.
    """
    overrides = {}
    for attrib_name, (config_section, config_key) in attrib_map.items():
        raw_value = read_config(config_section, config_key, None)
        if raw_value == None:
            continue
        overrides[attrib_name] = select({
            "DEFAULT": raw_value.lower() == "true",
            # Do not set attribute value for host tools
            "ovr_config//platform/macos/constraints:execution-platform-transitioned": None,
        })
    return overrides
def apple_library_macro_impl(apple_library_rule = None, **kwargs):
    # Apply buckconfig-driven local-execution overrides before invoking the rule.
    kwargs.update(apple_macro_layer_set_bool_override_attrs_from_config(_APPLE_LIBRARY_LOCAL_EXECUTION_OVERRIDES))
    apple_library_rule(**kwargs)
def apple_binary_macro_impl(apple_binary_rule = None, **kwargs):
    # Apply buckconfig-driven local-execution overrides before invoking the rule.
    kwargs.update(apple_macro_layer_set_bool_override_attrs_from_config(_APPLE_BINARY_LOCAL_EXECUTION_OVERRIDES))
    apple_binary_rule(**kwargs)
def apple_package_macro_impl(apple_package_rule = None, **kwargs):
    # Inject package-level config (e.g. the IPA compression level) into the attrs.
    kwargs.update(apple_package_config())
    apple_package_rule(**kwargs)

View File

@ -0,0 +1,14 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
# We use a fixed module cache location. This works around issues with
# multi-user setups with MobileOnDemand and allows us to share the
# module cache with Xcode, LLDB and arc focus.
#
# TODO(T123737676): This needs to be changed to use $TMPDIR in a
# wrapper for modular clang compilation.
MODULE_CACHE_PATH = "/tmp/buck-module-cache"

View File

@ -0,0 +1,130 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//:paths.bzl", "paths")
load(":apple_bundle_destination.bzl", "AppleBundleDestination", "bundle_relative_path_for_destination")
load(":apple_bundle_types.bzl", "AppleBundleInfo")
load(":apple_package_config.bzl", "IpaCompressionLevel")
load(":apple_sdk.bzl", "get_apple_sdk_name")
load(":apple_toolchain_types.bzl", "AppleToolchainInfo")
_SKIP_COPYING_SWIFT_STDLIB_EXTENSIONS = [
".framework",
".appex",
]
def apple_package_impl(ctx: "context") -> ["provider"]:
    # Implementation of `apple_package`: lays out the .ipa directory structure
    # and zips it into `<bundle name>.ipa` with the configured compression level.
    ipa_contents = _get_ipa_contents(ctx)
    compression_level = _compression_level_arg(IpaCompressionLevel(ctx.attrs._ipa_compression_level))
    package = ctx.actions.declare_output("{}.ipa".format(ctx.attrs.bundle.label.name))

    # TODO(T110378117): Pull this into a shared zip utility function
    # `zip -X` omits extra file attributes; the archive is streamed to stdout
    # and redirected into the output artifact.
    zip = cmd_args(["(cd \"", cmd_args(ipa_contents), "\" && zip -X -r {} - .) > ".format(compression_level), package.as_output()], delimiter = "")
    ctx.actions.run(["sh", "-c", zip], category = "apple_package_zip")

    return [DefaultInfo(default_output = package)]
def _get_ipa_contents(ctx) -> "artifact":
    """Lays out the directory tree that gets zipped into the final .ipa:
    Payload/<app>, plus optional SwiftSupport and Symbols directories."""
    bundle = ctx.attrs.bundle
    app = bundle[DefaultInfo].default_outputs[0]
    bundle_info = bundle[AppleBundleInfo]

    contents = {paths.join("Payload", app.basename): app}
    skip_swift_stdlib = bundle_info.skip_copying_swift_stdlib or app.extension in _SKIP_COPYING_SWIFT_STDLIB_EXTENSIONS
    if not skip_swift_stdlib:
        contents["SwiftSupport"] = _get_swift_support_dir(ctx, app, bundle_info)
    if bundle_info.contains_watchapp:
        contents["Symbols"] = _build_symbols_dir(ctx)
    return ctx.actions.copied_dir("__unzipped_ipa_contents__", contents)
def _build_symbols_dir(ctx) -> "artifact":
    # Creates an empty `Symbols` directory; added to the .ipa when the bundle
    # contains a watch app.
    out = ctx.actions.declare_output("__symbols__", dir = True)
    mkdir = cmd_args(["mkdir", "-p", out.as_output()])
    ctx.actions.run(mkdir, category = "watchos_symbols_dir")
    return out
def _get_swift_support_dir(ctx, bundle_output: "artifact", bundle_info: AppleBundleInfo.type) -> "artifact":
    # Builds the `SwiftSupport` directory by running the toolchain's
    # swift-stdlib-tool over the bundle's main executable, plugins and
    # frameworks, copying any Swift runtime dylibs they depend on.
    stdlib_tool = ctx.attrs._apple_toolchain[AppleToolchainInfo].swift_toolchain_info.swift_stdlib_tool
    sdk_name = get_apple_sdk_name(ctx)

    # .app -> app
    # This is the way the input is expected.
    extension = bundle_output.extension[1:]
    swift_support_dir = ctx.actions.declare_output("__swift_dylibs__", dir = True)

    # The invocation is written to a shell script (rather than run directly)
    # so the output directory can be created first; `allow_args = True` lets
    # cmd_args be embedded in the written file.
    script, _ = ctx.actions.write(
        "build_swift_support.sh",
        [
            cmd_args(swift_support_dir, format = "mkdir -p {}"),
            cmd_args(
                [
                    stdlib_tool,
                    # If you're debugging, you can pass the '--verbose' flag here.
                    "--copy",
                    "--scan-executable",
                    cmd_args(
                        [
                            bundle_output,
                            bundle_relative_path_for_destination(AppleBundleDestination("executables"), sdk_name, extension),
                            bundle_info.binary_name,
                        ],
                        delimiter = "/",
                    ),
                    _get_scan_folder_args(AppleBundleDestination("plugins"), bundle_output, sdk_name, extension),
                    _get_scan_folder_args(AppleBundleDestination("frameworks"), bundle_output, sdk_name, extension),
                    "--destination",
                    swift_support_dir,
                ],
                delimiter = " ",
                quote = "shell",
            ),
        ],
        allow_args = True,
    )
    # Inputs referenced only from inside the script must be declared via `.hidden`.
    ctx.actions.run(
        cmd_args(["/bin/sh", script]).hidden([stdlib_tool, bundle_output, swift_support_dir.as_output()]),
        category = "copy_swift_stdlibs",
    )
    return swift_support_dir
def _get_scan_folder_args(dest: AppleBundleDestination.type, bundle_output: "artifact", sdk_name, extension) -> "_arglike":
    # Builds `--scan-folder <bundle>/<destination path>` for swift-stdlib-tool.
    folder = cmd_args(
        [bundle_output, bundle_relative_path_for_destination(dest, sdk_name, extension)],
        delimiter = "/",
    )
    return cmd_args(["--scan-folder", folder])
def _compression_level_arg(compression_level: IpaCompressionLevel.type) -> str.type:
    # Translate the compression preset into the corresponding `zip` level flag.
    level_flags = {
        "default": "-6",
        "max": "-9",
        "min": "-1",
        "none": "-0",
    }
    flag = level_flags.get(compression_level.value)
    if flag == None:
        fail("Unknown .ipa compression level: " + str(compression_level))
    return flag

View File

@ -0,0 +1,18 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
# Compression presets for the generated .ipa archive, read from the
# `apple.ipa_compression_level` buckconfig entry.
IpaCompressionLevel = enum(
    "min",
    "max",
    "default",
    "none",
)
def apple_package_config() -> {str.type: ""}:
    # Reads the IPA compression level from buckconfig (`apple.ipa_compression_level`),
    # defaulting to "default", and exposes it as a private attribute value.
    return {
        "_ipa_compression_level": read_config("apple", "ipa_compression_level", IpaCompressionLevel("default").value),
    }

View File

@ -0,0 +1,35 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load(":apple_resource_types.bzl", "AppleResourceDestination", "AppleResourceSpec")
load(":resource_groups.bzl", "create_resource_graph")
def apple_resource_impl(ctx: "context") -> ["provider"]:
    """Implementation of `apple_resource`: packages the attrs into a resource
    spec and registers it as a node in the resource graph."""
    spec = AppleResourceSpec(
        files = ctx.attrs.files,
        dirs = ctx.attrs.dirs,
        content_dirs = ctx.attrs.content_dirs,
        destination = AppleResourceDestination(ctx.attrs.destination or "resources"),
        variant_files = ctx.attrs.variants or [],
        named_variant_files = ctx.attrs.named_variants or {},
        codesign_files_on_copy = ctx.attrs.codesign_on_copy,
    )
    graph = create_resource_graph(
        ctx = ctx,
        labels = ctx.attrs.labels,
        deps = [],
        exported_deps = [],
        resource_spec = spec,
    )
    # A deliberately empty `headers` sub-target is exposed alongside the graph.
    default_info = DefaultInfo(
        sub_targets = {
            "headers": [
                DefaultInfo(default_outputs = []),
            ],
        },
    )
    return [default_info, graph]

View File

@ -0,0 +1,35 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
# Represents the values for the `destination` field of `apple_resource`
AppleResourceDestination = enum(
    "executables",
    "frameworks",
    "loginitems",
    "plugins",
    "resources",
    "xpcservices",
)

# Defines _where_ resources need to be placed in an `apple_bundle`
AppleResourceSpec = record(
    # Individual files (or dependencies that produce files) to copy.
    files = field([["artifact", "dependency"]], []),
    dirs = field(["artifact"], []),
    # NOTE(review): presumably directories whose *contents* are copied into
    # the destination (vs. `dirs`, copied whole) — confirm against bundling code.
    content_dirs = field(["artifact"], []),
    destination = AppleResourceDestination.type,
    variant_files = field(["artifact"], []),
    # Map from locale to list of files for that locale, e.g.
    # `{ "ru.lproj" : ["Localizable.strings"] }`
    named_variant_files = field({str.type: ["artifact"]}, {}),
    # Whether each copied file should be codesigned during bundling.
    codesign_files_on_copy = field(bool.type, False),
)

# Used when invoking `ibtool`, `actool` and `momc`
AppleResourceProcessingOptions = record(
    prefer_local = field(bool.type, False),
    allow_cache_upload = field(bool.type, False),
)

View File

@ -0,0 +1,15 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load(":apple_bundle_destination.bzl", "AppleBundleDestination")
load(
":apple_resource_types.bzl",
"AppleResourceDestination", # @unused Used as a type
)
def apple_bundle_destination_from_resource_destination(res_destination: AppleResourceDestination.type) -> AppleBundleDestination.type:
    # Every resource destination value is also a valid bundle destination
    # value, so a direct value-to-value mapping suffices.
    return AppleBundleDestination(res_destination.value)

View File

@ -0,0 +1,185 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//apple/swift:swift_toolchain.bzl", "swift_toolchain_impl")
load("@prelude//cxx:headers.bzl", "CPrecompiledHeaderInfo")
load("@prelude//cxx:omnibus.bzl", "omnibus_environment_attr")
load("@prelude//cxx/user:link_group_map.bzl", "link_group_map_attr")
load("@prelude//linking:link_info.bzl", "LinkOrdering")
load("@prelude//decls/common.bzl", "Linkage")
load(":apple_asset_catalog.bzl", "apple_asset_catalog_impl")
load(":apple_binary.bzl", "apple_binary_impl")
load(":apple_bundle.bzl", "apple_bundle_impl")
load(":apple_bundle_types.bzl", "AppleBundleInfo")
load(":apple_core_data.bzl", "apple_core_data_impl")
load(":apple_library.bzl", "apple_library_impl")
load(":apple_package.bzl", "apple_package_impl")
load(":apple_package_config.bzl", "IpaCompressionLevel")
load(":apple_resource.bzl", "apple_resource_impl")
load(
":apple_rules_impl_utility.bzl",
"APPLE_ARCHIVE_OBJECTS_LOCALLY_OVERRIDE_ATTR_NAME",
"APPLE_LINK_LIBRARIES_LOCALLY_OVERRIDE_ATTR_NAME",
"apple_bundle_extra_attrs",
"apple_test_extra_attrs",
"get_apple_toolchain_attr",
"get_apple_xctoolchain_attr",
"get_apple_xctoolchain_bundle_id_attr",
)
load(":apple_test.bzl", "apple_test_impl")
load(":apple_toolchain.bzl", "apple_toolchain_impl")
load(":apple_toolchain_types.bzl", "AppleToolsInfo")
load(":prebuilt_apple_framework.bzl", "prebuilt_apple_framework_impl")
load(":xcode_postbuild_script.bzl", "xcode_postbuild_script_impl")
load(":xcode_prebuild_script.bzl", "xcode_prebuild_script_impl")
# Map from rule name to its implementation function. Consumed by the rule
# definitions to wire each Apple rule to its `impl` callback.
implemented_rules = {
    "apple_asset_catalog": apple_asset_catalog_impl,
    "apple_binary": apple_binary_impl,
    "apple_bundle": apple_bundle_impl,
    "apple_library": apple_library_impl,
    "apple_package": apple_package_impl,
    "apple_resource": apple_resource_impl,
    "apple_test": apple_test_impl,
    "apple_toolchain": apple_toolchain_impl,
    "core_data_model": apple_core_data_impl,
    "prebuilt_apple_framework": prebuilt_apple_framework_impl,
    "swift_toolchain": swift_toolchain_impl,
    "xcode_postbuild_script": xcode_postbuild_script_impl,
    "xcode_prebuild_script": xcode_prebuild_script_impl,
}
# Shared toolchain attr instance, reused across all rules below.
_APPLE_TOOLCHAIN_ATTR = get_apple_toolchain_attr()

# Per-rule extra attribute definitions, layered on top of the common attrs
# provided by the macro layer / rule declarations.
extra_attributes = {
    "apple_asset_catalog": {
        "dirs": attrs.list(attrs.source(allow_directory = True), default = []),
    },
    "apple_binary": {
        "binary_linker_flags": attrs.list(attrs.arg(), default = []),
        "enable_distributed_thinlto": attrs.bool(default = False),
        "extra_xcode_sources": attrs.list(attrs.source(allow_directory = True), default = []),
        "link_group_map": link_group_map_attr(),
        "link_locally_override": attrs.option(attrs.bool(), default = None),
        "link_ordering": attrs.option(attrs.enum(LinkOrdering.values()), default = None),
        "precompiled_header": attrs.option(attrs.dep(providers = [CPrecompiledHeaderInfo]), default = None),
        "prefer_stripped_objects": attrs.bool(default = False),
        "preferred_linkage": attrs.enum(Linkage, default = "any"),
        "stripped": attrs.bool(default = False),
        "_apple_toolchain": _APPLE_TOOLCHAIN_ATTR,
        "_apple_xctoolchain": get_apple_xctoolchain_attr(),
        "_apple_xctoolchain_bundle_id": get_apple_xctoolchain_bundle_id_attr(),
        "_omnibus_environment": omnibus_environment_attr(),
    },
    "apple_bundle": apple_bundle_extra_attrs(),
    "apple_library": {
        "extra_xcode_sources": attrs.list(attrs.source(allow_directory = True), default = []),
        "link_group_map": link_group_map_attr(),
        "link_ordering": attrs.option(attrs.enum(LinkOrdering.values()), default = None),
        "precompiled_header": attrs.option(attrs.dep(providers = [CPrecompiledHeaderInfo]), default = None),
        "preferred_linkage": attrs.enum(Linkage, default = "any"),
        "serialize_debugging_options": attrs.bool(default = True),
        "stripped": attrs.bool(default = False),
        "supports_shlib_interfaces": attrs.bool(default = True),
        "use_archive": attrs.option(attrs.bool(), default = None),
        "_apple_toolchain": _APPLE_TOOLCHAIN_ATTR,
        # FIXME: prelude// should be standalone (not refer to fbsource//)
        "_apple_tools": attrs.exec_dep(default = "fbsource//xplat/buck2/platform/apple:apple-tools", providers = [AppleToolsInfo]),
        "_apple_xctoolchain": get_apple_xctoolchain_attr(),
        "_apple_xctoolchain_bundle_id": get_apple_xctoolchain_bundle_id_attr(),
        "_omnibus_environment": omnibus_environment_attr(),
        APPLE_LINK_LIBRARIES_LOCALLY_OVERRIDE_ATTR_NAME: attrs.option(attrs.bool(), default = None),
        APPLE_ARCHIVE_OBJECTS_LOCALLY_OVERRIDE_ATTR_NAME: attrs.option(attrs.bool(), default = None),
    },
    "apple_package": {
        "bundle": attrs.dep(providers = [AppleBundleInfo]),
        "_apple_toolchain": _APPLE_TOOLCHAIN_ATTR,
        "_ipa_compression_level": attrs.enum(IpaCompressionLevel.values()),
    },
    "apple_resource": {
        "codesign_on_copy": attrs.bool(default = False),
        "content_dirs": attrs.list(attrs.source(allow_directory = True), default = []),
        "dirs": attrs.list(attrs.source(allow_directory = True), default = []),
        "files": attrs.list(attrs.one_of(attrs.dep(), attrs.source()), default = []),
    },
    "apple_test": apple_test_extra_attrs(),
    "apple_toolchain": {
        # The Buck v1 attribute specs defines those as `attrs.source()` but
        # we want to properly handle any runnable tools that might have
        # additional runtime requirements.
        "actool": attrs.exec_dep(providers = [RunInfo]),
        "codesign": attrs.exec_dep(providers = [RunInfo]),
        "codesign_allocate": attrs.exec_dep(providers = [RunInfo]),
        "codesign_identities_command": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None),
        # Controls invocations of `ibtool`, `actool` and `momc`
        "compile_resources_locally": attrs.bool(default = False),
        "cxx_toolchain": attrs.toolchain_dep(),
        "dsymutil": attrs.exec_dep(providers = [RunInfo]),
        "dwarfdump": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None),
        "ibtool": attrs.exec_dep(providers = [RunInfo]),
        "installer": attrs.default_only(attrs.label(default = "buck//src/com/facebook/buck/installer/apple:apple_installer")),
        "libtool": attrs.exec_dep(providers = [RunInfo]),
        "lipo": attrs.exec_dep(providers = [RunInfo]),
        "min_version": attrs.option(attrs.string(), default = None),
        "momc": attrs.exec_dep(providers = [RunInfo]),
        # A placeholder tool that can be used to set up toolchain constraints.
        # Useful when fat and thin toolchains share the same underlying tools via `command_alias()`,
        # which requires setting up separate platform-specific aliases with the correct constraints.
        "placeholder_tool": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None),
        "platform_path": attrs.option(attrs.source(), default = None),  # Mark as optional until we remove `_internal_platform_path`
        # Defines whether the Xcode project generator needs to check
        # that the selected Xcode version matches the one defined
        # by the `xcode_build_version` fields.
        "requires_xcode_version_match": attrs.bool(default = False),
        "sdk_path": attrs.option(attrs.source(), default = None),  # Mark as optional until we remove `_internal_sdk_path`
        "swift_toolchain": attrs.option(attrs.toolchain_dep(), default = None),
        "version": attrs.option(attrs.string(), default = None),
        "xcode_build_version": attrs.option(attrs.string(), default = None),
        "xcode_version": attrs.option(attrs.string(), default = None),
        "xctest": attrs.exec_dep(providers = [RunInfo]),
        # TODO(T111858757): Mirror of `platform_path` but treated as a string. It allows us to
        # pass abs paths during development and using the currently selected Xcode.
        "_internal_platform_path": attrs.option(attrs.string(), default = None),
        # TODO(T111858757): Mirror of `sdk_path` but treated as a string. It allows us to
        # pass abs paths during development and using the currently selected Xcode.
        "_internal_sdk_path": attrs.option(attrs.string(), default = None),
    },
    "core_data_model": {
        "path": attrs.source(allow_directory = True),
    },
    "prebuilt_apple_framework": {
        "framework": attrs.option(attrs.source(allow_directory = True), default = None),
        "preferred_linkage": attrs.enum(Linkage, default = "any"),
        "_apple_toolchain": _APPLE_TOOLCHAIN_ATTR,
        "_omnibus_environment": omnibus_environment_attr(),
    },
    "scene_kit_assets": {
        "path": attrs.source(allow_directory = True),
    },
    "swift_library": {
        "preferred_linkage": attrs.enum(Linkage, default = "any"),
    },
    "swift_toolchain": {
        "architecture": attrs.option(attrs.string(), default = None),  # TODO(T115173356): Make field non-optional
        # A placeholder tool that can be used to set up toolchain constraints.
        # Useful when fat and thin toolchains share the same underlying tools via `command_alias()`,
        # which requires setting up separate platform-specific aliases with the correct constraints.
        "placeholder_tool": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None),
        "platform_path": attrs.option(attrs.source(), default = None),  # Mark as optional until we remove `_internal_platform_path`
        "sdk_modules": attrs.list(attrs.exec_dep(), default = []),  # A list or a root target that represent a graph of sdk modules (e.g Frameworks)
        "sdk_path": attrs.option(attrs.source(), default = None),  # Mark as optional until we remove `_internal_sdk_path`
        "swift_stdlib_tool": attrs.exec_dep(providers = [RunInfo]),
        "swiftc": attrs.exec_dep(providers = [RunInfo]),
        # TODO(T111858757): Mirror of `platform_path` but treated as a string. It allows us to
        # pass abs paths during development and using the currently selected Xcode.
        "_internal_platform_path": attrs.option(attrs.string(), default = None),
        # TODO(T111858757): Mirror of `sdk_path` but treated as a string. It allows us to
        # pass abs paths during development and using the currently selected Xcode.
        "_internal_sdk_path": attrs.option(attrs.string(), default = None),
        "_swiftc_wrapper": attrs.exec_dep(providers = [RunInfo], default = "prelude//apple/tools:swift_exec"),
    },
}

View File

@ -0,0 +1,98 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//apple:apple_bundle_types.bzl", "AppleBundleResourceInfo")
load("@prelude//apple:apple_code_signing_types.bzl", "CodeSignType")
load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo", "AppleToolsInfo")
load("@prelude//apple/user:apple_selective_debugging.bzl", "AppleSelectiveDebuggingInfo")
load("@prelude//apple/user:resource_group_map.bzl", "resource_group_map_attr")
load("@prelude//cxx:headers.bzl", "CPrecompiledHeaderInfo")
load("@prelude//cxx:omnibus.bzl", "omnibus_environment_attr")
load("@prelude//linking:link_info.bzl", "LinkOrdering")
load("@prelude//decls/common.bzl", "LinkableDepType", "Linkage")
def get_apple_toolchain_attr():
    """Default `_apple_toolchain` attr used by non-bundle Apple rules."""

    # FIXME: prelude// should be standalone (not refer to fbcode//)
    default_target = "fbcode//buck2/platform/toolchain:apple-default"
    return attrs.toolchain_dep(default = default_target, providers = [AppleToolchainInfo])
def _get_apple_bundle_toolchain_attr():
    """`_apple_toolchain` attr used by `apple_bundle` (bundle-specific toolchain)."""

    # FIXME: prelude// should be standalone (not refer to fbcode//)
    default_target = "fbcode//buck2/platform/toolchain:apple-bundle"
    return attrs.toolchain_dep(default = default_target, providers = [AppleToolchainInfo])
def get_apple_xctoolchain_attr():
    """Default `_apple_xctoolchain` attr shared by Apple rules."""

    # FIXME: prelude// should be standalone (not refer to fbcode//)
    default_target = "fbcode//buck2/platform/toolchain:apple-xctoolchain"
    return attrs.toolchain_dep(default = default_target)
def get_apple_xctoolchain_bundle_id_attr():
    """Default `_apple_xctoolchain_bundle_id` attr shared by Apple rules."""

    # FIXME: prelude// should be standalone (not refer to fbcode//)
    default_target = "fbcode//buck2/platform/toolchain:apple-xctoolchain-bundle-id"
    return attrs.toolchain_dep(default = default_target)
# Names of hidden attrs used to override local-vs-remote execution of link and
# archive actions, and ad-hoc code-signing entitlement behavior. Declared here
# so rule definitions and the macro layer agree on the exact attr names.
APPLE_LINK_LIBRARIES_LOCALLY_OVERRIDE_ATTR_NAME = "_link_libraries_locally_override"
APPLE_ARCHIVE_OBJECTS_LOCALLY_OVERRIDE_ATTR_NAME = "_archive_objects_locally_override"
APPLE_USE_ENTITLEMENTS_WHEN_ADHOC_CODE_SIGNING_CONFIG_OVERRIDE_ATTR_NAME = "_use_entitlements_when_adhoc_code_signing"
APPLE_USE_ENTITLEMENTS_WHEN_ADHOC_CODE_SIGNING_ATTR_NAME = "use_entitlements_when_adhoc_code_signing"
def _apple_bundle_like_common_attrs():
    # `apple_bundle()` and `apple_test()` share a common set of extra attrs
    return {
        # FIXME: prelude// should be standalone (not refer to fbsource//)
        "_apple_tools": attrs.exec_dep(default = "fbsource//xplat/buck2/platform/apple:apple-tools", providers = [AppleToolsInfo]),
        "_apple_xctoolchain": get_apple_xctoolchain_attr(),
        "_apple_xctoolchain_bundle_id": get_apple_xctoolchain_bundle_id_attr(),
        # Optional opaque string used to force bundling actions to re-run.
        "_bundling_cache_buster": attrs.option(attrs.string(), default = None),
        "_bundling_log_file_enabled": attrs.bool(default = False),
        # Optional override for the code-signing type; None = auto-detect.
        "_codesign_type": attrs.option(attrs.enum(CodeSignType.values()), default = None),
        "_compile_resources_locally_override": attrs.option(attrs.bool(), default = None),
        "_dry_run_code_signing": attrs.bool(default = False),
        "_fast_adhoc_signing_enabled": attrs.bool(default = False),
        "_incremental_bundling_enabled": attrs.bool(default = False),
        "_profile_bundling_enabled": attrs.bool(default = False),
        APPLE_USE_ENTITLEMENTS_WHEN_ADHOC_CODE_SIGNING_CONFIG_OVERRIDE_ATTR_NAME: attrs.option(attrs.bool(), default = None),
        APPLE_USE_ENTITLEMENTS_WHEN_ADHOC_CODE_SIGNING_ATTR_NAME: attrs.bool(default = False),
    }
def apple_test_extra_attrs():
    # To build an `apple_test`, one needs to first build a shared `apple_library` then
    # wrap this test library into an `apple_bundle`. Because of this, `apple_test` has attributes
    # from both `apple_library` and `apple_bundle`.
    attribs = {
        # Expected by `apple_bundle`, for `apple_test` this field is always None.
        "binary": attrs.option(attrs.dep(), default = None),
        # The resulting test bundle should have .xctest extension.
        "extension": attrs.string(default = "xctest"),
        "extra_xcode_sources": attrs.list(attrs.source(allow_directory = True), default = []),
        "link_ordering": attrs.option(attrs.enum(LinkOrdering.values()), default = None),
        # Used to create the shared test library. Any library deps whose `preferred_linkage` isn't "shared" will
        # be treated as "static" deps and linked into the shared test library.
        "link_style": attrs.enum(LinkableDepType, default = "static"),
        "precompiled_header": attrs.option(attrs.dep(providers = [CPrecompiledHeaderInfo]), default = None),
        # The test source code and lib dependencies should be built into a shared library.
        "preferred_linkage": attrs.enum(Linkage, default = "shared"),
        # Expected by `apple_bundle`, for `apple_test` this field is always None.
        "resource_group": attrs.option(attrs.string(), default = None),
        # Expected by `apple_bundle`, for `apple_test` this field is always None.
        "resource_group_map": attrs.option(attrs.string(), default = None),
        "stripped": attrs.bool(default = False),
        "_apple_toolchain": get_apple_toolchain_attr(),
        "_omnibus_environment": omnibus_environment_attr(),
        APPLE_LINK_LIBRARIES_LOCALLY_OVERRIDE_ATTR_NAME: attrs.option(attrs.bool(), default = None),
    }
    # Layer in the attrs shared with `apple_bundle` (common keys would win here).
    attribs.update(_apple_bundle_like_common_attrs())
    return attribs
def apple_bundle_extra_attrs():
    """Extra attrs for `apple_bundle`: bundle-specific attrs plus the set shared with `apple_test`."""
    bundle_specific_attrs = {
        "resource_group_map": resource_group_map_attr(),
        "selective_debugging": attrs.option(attrs.dep(providers = [AppleSelectiveDebuggingInfo]), default = None),
        "_apple_toolchain": _get_apple_bundle_toolchain_attr(),
        "_codesign_entitlements": attrs.option(attrs.source(), default = None),
        # FIXME: prelude// should be standalone (not refer to fbsource//)
        "_provisioning_profiles": attrs.dep(default = "fbsource//xplat/buck2/platform/apple:provisioning_profiles"),
        "_resource_bundle": attrs.option(attrs.dep(providers = [AppleBundleResourceInfo]), default = None),
    }
    bundle_specific_attrs.update(_apple_bundle_like_common_attrs())
    return bundle_specific_attrs

View File

@ -0,0 +1,15 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo")
def get_apple_sdk_name(ctx: "context") -> str.type:
    """
    Get the SDK defined on the toolchain.
    Will throw if the `_apple_toolchain` is not present.
    """
    toolchain_info = ctx.attrs._apple_toolchain[AppleToolchainInfo]
    return toolchain_info.sdk_name

View File

@ -0,0 +1,95 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
# Static metadata describing a single Apple SDK/platform.
AppleSdkMetadata = record(
    # Canonical SDK name (e.g. "iphoneos", "macosx").
    name = field(str.type),
    # `--target-device` flags passed to resource tools for this platform.
    target_device_flags = field([str.type], []),
    # True when ad-hoc code signing is sufficient (per the instances below:
    # simulators, macOS and Catalyst).
    is_ad_hoc_code_sign_sufficient = field(bool.type),
    # Platform names advertised in the bundle's Info.plist.
    # NOTE(review): exact plist key populated from this is not visible here — confirm.
    info_plist_supported_platforms_values = field([str.type]),
    # Info.plist key that carries the minimum OS version for this platform.
    min_version_plist_info_key = field(str.type),
)
# One `AppleSdkMetadata` instance per supported SDK. Device SDKs (iphoneos,
# appletvos, watchos) require real code signing; simulator and macOS SDKs
# accept ad-hoc signing.
IPhoneOSSdkMetadata = AppleSdkMetadata(
    name = "iphoneos",
    target_device_flags = ["--target-device", "iphone", "--target-device", "ipad"],
    is_ad_hoc_code_sign_sufficient = False,
    info_plist_supported_platforms_values = ["iPhoneOS"],
    min_version_plist_info_key = "MinimumOSVersion",
)

IPhoneSimulatorSdkMetadata = AppleSdkMetadata(
    name = "iphonesimulator",
    target_device_flags = ["--target-device", "iphone", "--target-device", "ipad"],
    is_ad_hoc_code_sign_sufficient = True,
    info_plist_supported_platforms_values = ["iPhoneSimulator"],
    min_version_plist_info_key = "MinimumOSVersion",
)

TVOSSdkMetadata = AppleSdkMetadata(
    name = "appletvos",
    target_device_flags = ["--target-device", "tv"],
    is_ad_hoc_code_sign_sufficient = False,
    info_plist_supported_platforms_values = ["AppleTVOS"],
    min_version_plist_info_key = "MinimumOSVersion",
)

TVSimulatorSdkMetadata = AppleSdkMetadata(
    name = "appletvsimulator",
    target_device_flags = ["--target-device", "tv"],
    is_ad_hoc_code_sign_sufficient = True,
    info_plist_supported_platforms_values = ["AppleTVSimulator"],
    min_version_plist_info_key = "MinimumOSVersion",
)

WatchOSSdkMetadata = AppleSdkMetadata(
    name = "watchos",
    target_device_flags = ["--target-device", "watch"],
    is_ad_hoc_code_sign_sufficient = False,
    info_plist_supported_platforms_values = ["WatchOS"],
    min_version_plist_info_key = "MinimumOSVersion",
)

WatchSimulatorSdkMetadata = AppleSdkMetadata(
    name = "watchsimulator",
    target_device_flags = ["--target-device", "watch"],
    is_ad_hoc_code_sign_sufficient = True,
    info_plist_supported_platforms_values = ["WatchSimulator"],
    min_version_plist_info_key = "MinimumOSVersion",
)

# macOS uses a different Info.plist key for the minimum system version.
MacOSXSdkMetadata = AppleSdkMetadata(
    name = "macosx",
    target_device_flags = ["--target-device", "mac"],
    is_ad_hoc_code_sign_sufficient = True,
    info_plist_supported_platforms_values = ["MacOSX"],
    min_version_plist_info_key = "LSMinimumSystemVersion",
)

MacOSXCatalystSdkMetadata = AppleSdkMetadata(
    name = "maccatalyst",
    target_device_flags = ["--target-device", "ipad"],
    is_ad_hoc_code_sign_sufficient = True,
    info_plist_supported_platforms_values = ["MacOSX"],
    min_version_plist_info_key = "LSMinimumSystemVersion",
)
# Index of all known SDK metadata, keyed by canonical SDK name.
_SDK_MAP = {
    IPhoneOSSdkMetadata.name: IPhoneOSSdkMetadata,
    IPhoneSimulatorSdkMetadata.name: IPhoneSimulatorSdkMetadata,
    TVOSSdkMetadata.name: TVOSSdkMetadata,
    TVSimulatorSdkMetadata.name: TVSimulatorSdkMetadata,
    WatchOSSdkMetadata.name: WatchOSSdkMetadata,
    WatchSimulatorSdkMetadata.name: WatchSimulatorSdkMetadata,
    MacOSXSdkMetadata.name: MacOSXSdkMetadata,
    MacOSXCatalystSdkMetadata.name: MacOSXCatalystSdkMetadata,
}
def get_apple_sdk_metadata_for_sdk_name(name: str.type) -> AppleSdkMetadata.type:
    """Look up the `AppleSdkMetadata` for a known SDK name, failing on unknown names."""
    if name not in _SDK_MAP:
        fail("unrecognized sdk name: `{}`".format(name))
    return _SDK_MAP[name]

View File

@ -0,0 +1,13 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//cxx:cxx_context.bzl", "get_cxx_toolchain_info")
def apple_strip_args(ctx: "context") -> "cmd_args":
    """Flags passed to `strip`: the toolchain's non-global strip flags, or `-x -T` as a fallback."""
    toolchain_info = get_cxx_toolchain_info(ctx)
    configured_flags = toolchain_info.strip_flags_info.strip_non_global_flags
    if configured_flags != None:
        return cmd_args(configured_flags)
    return cmd_args(["-x", "-T"])

View File

@ -0,0 +1,79 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo")
load("@prelude//cxx:preprocessor.bzl", "CPreprocessor")
load(":apple_sdk.bzl", "get_apple_sdk_name")
# TODO(T112099448): In the future, the min version flag should live on the apple_toolchain()
# TODO(T113776898): Switch to -mtargetos= flag which should live on the apple_toolchain()
# Maps an SDK name to the compiler/linker flag that sets its minimum
# deployment version.
# NOTE(review): there are no entries for "appletvos", "appletvsimulator" or
# "maccatalyst" even though those SDKs exist elsewhere in the prelude;
# `_get_min_deployment_version_target_flag` will `fail()` for them when a
# target SDK version is set — confirm whether that is intentional.
_APPLE_MIN_VERSION_FLAG_SDK_MAP = {
    "iphoneos": "-mios-version-min",
    "iphonesimulator": "-mios-simulator-version-min",
    "macosx": "-mmacosx-version-min",
    "watchos": "-mwatchos-version-min",
    "watchsimulator": "-mwatchsimulator-version-min",
}
# Returns the target SDK version for apple_(binary|library) and uses
# apple_toolchain() min version as a fallback. This is the central place
# where the version for a particular node is defined, no other places
# should be accessing `attrs.target_sdk_version` or `attrs.min_version`.
def get_min_deployment_version_for_node(ctx: "context") -> [None, str.type]:
    """Resolve the target SDK version: per-target `target_sdk_version` wins, otherwise the toolchain's `min_version` (empty string treated as unset)."""
    fallback_version = ctx.attrs._apple_toolchain[AppleToolchainInfo].min_version
    if fallback_version == "":
        fallback_version = None
    target_version = getattr(ctx.attrs, "target_sdk_version", None)
    return target_version or fallback_version
# Returns the min deployment flag to pass to the compiler + linker
def _get_min_deployment_version_target_flag(ctx: "context") -> [None, str.type]:
    """Build the `-m<sdk>-version-min=<version>` flag, or None when no version is configured."""
    version = get_min_deployment_version_for_node(ctx)
    if version == None:
        return None

    sdk_name = get_apple_sdk_name(ctx)
    flag_name = _APPLE_MIN_VERSION_FLAG_SDK_MAP.get(sdk_name)
    if flag_name == None:
        fail("Could not determine min version flag for SDK {}".format(sdk_name))

    return "{}={}".format(flag_name, version)
# There are two main ways in which we can pass target SDK version:
# - versioned target triple
# - unversioned target triple + version flag
#
# A versioned target triple overrides any version flags and requires
# additional flags to disable the warning/error (`-Woverriding-t-option`),
# so we prefer to use an unversioned target triple + version flag.
#
# Furthermore, we want to ensure that there's _exactly one_ version flag
# on a compiler/link line. This makes debugging easier and avoids issues
# with multiple layers each adding/overriding target SDK. It also makes
# it easier to switch to versioned target triple.
#
# There are exactly two ways in which to specify the target SDK:
# - apple_toolchain.min_version sets the default value
# - apple_(binary|library).target_sdk_version sets the per-target value
#
# apple_toolchain() rules should _never_ add any version flags because
# the rule does _not_ know whether a particular target will request a
# non-default value. Otherwise, we end up with multiple version flags,
# one added by the toolchain and then additional overrides by targets.
def get_min_deployment_version_target_linker_flags(ctx: "context") -> [str.type]:
    """Linker flags pinning the minimum deployment version, or [] when unset."""
    flag = _get_min_deployment_version_target_flag(ctx)
    if flag == None:
        return []
    return [flag]
def get_min_deployment_version_target_preprocessor_flags(ctx: "context") -> [CPreprocessor.type]:
    """Preprocessor entries carrying the minimum deployment version flag, or [] when unset."""
    flag = _get_min_deployment_version_target_flag(ctx)
    if flag == None:
        return []
    return [CPreprocessor(args = [cmd_args(flag)])]

View File

@ -0,0 +1,227 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//apple:apple_library.bzl", "AppleLibraryAdditionalParams", "apple_library_rule_constructor_params_and_swift_providers")
load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo")
load("@prelude//apple/swift:swift_compilation.bzl", "get_swift_anonymous_targets", "uses_explicit_modules")
load(
"@prelude//cxx:compile.bzl",
"CxxSrcWithFlags", # @unused Used as a type
)
load("@prelude//cxx:cxx_library.bzl", "cxx_library_parameterized")
load("@prelude//cxx:cxx_types.bzl", "CxxRuleProviderParams", "CxxRuleSubTargetParams")
load(
"@prelude//cxx:linker.bzl",
"SharedLibraryFlagOverrides",
)
load(
"@prelude//utils:dicts.bzl",
"flatten_x",
)
load(":apple_bundle.bzl", "AppleBundlePartListConstructorParams", "get_apple_bundle_part_list")
load(":apple_bundle_destination.bzl", "AppleBundleDestination")
load(":apple_bundle_part.bzl", "AppleBundlePart", "assemble_bundle", "bundle_output", "get_bundle_dir_name")
load(":apple_bundle_types.bzl", "AppleBundleInfo")
load(":apple_bundle_utility.bzl", "get_product_name")
load(":apple_dsym.bzl", "DEBUGINFO_SUBTARGET", "DSYM_SUBTARGET", "DWARF_AND_DSYM_SUBTARGET", "get_apple_dsym")
load(":xcode.bzl", "apple_populate_xcode_attributes")
def apple_test_impl(ctx: "context") -> [["provider"], "promise"]:
    # Builds an `apple_test`: compiles the test sources as a shared library
    # linked with `-bundle`, assembles them into an .xctest bundle, generates
    # a dSYM, and returns test-runner (Tpx) providers. When explicit Swift
    # modules are in use, the work is deferred behind anonymous targets and a
    # promise is returned instead of the provider list.
    def get_apple_test_providers(deps_providers) -> ["provider"]:
        xctest_bundle = bundle_output(ctx)
        # Optional host app: its bundle is copied next to the test output and
        # its binary is used as the `-bundle_loader` during linking.
        test_host_app_bundle = _get_test_host_app_bundle(ctx)
        test_host_app_binary = _get_test_host_app_binary(ctx, test_host_app_bundle)
        objc_bridging_header_flags = [
            # Disable bridging header -> PCH compilation to mitigate an issue in Xcode 13 beta.
            "-disable-bridging-pch",
            "-import-objc-header",
            cmd_args(ctx.attrs.bridging_header),
        ] if ctx.attrs.bridging_header else []
        constructor_params = apple_library_rule_constructor_params_and_swift_providers(
            ctx,
            AppleLibraryAdditionalParams(
                rule_type = "apple_test",
                extra_exported_link_flags = _get_xctest_framework_linker_flags(ctx) + _get_bundle_loader_flags(test_host_app_binary),
                extra_swift_compiler_flags = _get_xctest_framework_search_paths_flags(ctx) + objc_bridging_header_flags,
                shared_library_flags = SharedLibraryFlagOverrides(
                    # When `-bundle` is used we can't use the `-install_name` args, thus we keep this field empty.
                    shared_library_name_linker_flags_format = [],
                    # When building Apple tests, we want to link with `-bundle` instead of `-shared` to allow
                    # linking against the bundle loader.
                    shared_library_flags = ["-bundle"],
                ),
                generate_sub_targets = CxxRuleSubTargetParams(
                    compilation_database = False,
                    headers = False,
                    link_group_map = False,
                ),
                generate_providers = CxxRuleProviderParams(
                    compilation_database = True,
                    default = False,
                    linkable_graph = False,
                    link_style_outputs = False,
                    merged_native_link_info = False,
                    omnibus_root = False,
                    preprocessors = False,
                    resources = False,
                    shared_libraries = False,
                    template_placeholders = False,
                ),
                populate_xcode_attributes_func = lambda local_ctx, **kwargs: _xcode_populate_attributes(ctx = local_ctx, xctest_bundle = xctest_bundle, test_host_app_binary = test_host_app_binary, **kwargs),
                # We want to statically link the transitive dep graph of the apple_test()
                # which we can achieve by forcing link group linking with
                # an empty mapping (i.e., default mapping).
                force_link_group_linking = True,
            ),
            deps_providers,
        )
        cxx_library_output = cxx_library_parameterized(ctx, constructor_params)
        test_binary_output = ctx.actions.declare_output(get_product_name(ctx))
        # Rename in order to generate dSYM with correct binary name (dsymutil doesn't provide a way to control binary name in output dSYM bundle).
        test_binary = ctx.actions.copy_file(test_binary_output, cxx_library_output.default_output.default)
        # Place the renamed test binary into the bundle's executables dir and
        # assemble the full .xctest bundle around it.
        binary_part = AppleBundlePart(source = test_binary, destination = AppleBundleDestination("executables"), new_name = ctx.attrs.name)
        part_list_output = get_apple_bundle_part_list(ctx, AppleBundlePartListConstructorParams(binaries = [binary_part]))
        assemble_bundle(ctx, xctest_bundle, part_list_output.parts, part_list_output.info_plist_part)
        sub_targets = cxx_library_output.sub_targets
        # Exactly one DefaultInfo is expected under the debuginfo subtarget.
        (debuginfo,) = sub_targets[DEBUGINFO_SUBTARGET]
        dsym_artifact = get_apple_dsym(
            ctx = ctx,
            executable = test_binary,
            external_debug_info = debuginfo.other_outputs,
            action_identifier = "generate_apple_test_dsym",
            output_path_override = get_bundle_dir_name(ctx) + ".dSYM",
        )
        sub_targets[DSYM_SUBTARGET] = [DefaultInfo(default_output = dsym_artifact)]
        # If the test has a test host, add a subtarget to build the test host app bundle.
        sub_targets["test-host"] = [DefaultInfo(default_output = test_host_app_bundle)] if test_host_app_bundle else [DefaultInfo()]
        # The dwarf-and-dsym subtarget additionally attaches the dSYM to the
        # runnable test info so symbolication works.
        sub_targets[DWARF_AND_DSYM_SUBTARGET] = [
            DefaultInfo(default_output = xctest_bundle, other_outputs = [dsym_artifact]),
            _get_test_info(ctx, xctest_bundle, test_host_app_bundle, dsym_artifact),
        ]
        return [
            DefaultInfo(default_output = xctest_bundle, sub_targets = sub_targets),
            _get_test_info(ctx, xctest_bundle, test_host_app_bundle),
            cxx_library_output.xcode_data_info,
            cxx_library_output.cxx_compilationdb_info,
        ]
    if uses_explicit_modules(ctx):
        return get_swift_anonymous_targets(ctx, get_apple_test_providers)
    else:
        return get_apple_test_providers([])
def _get_test_info(ctx: "context", xctest_bundle: "artifact", test_host_app_bundle: ["artifact", None], dsym_artifact: ["artifact", None] = None) -> "provider":
    """
    Build the `ExternalRunnerTestInfo` for an `apple_test`.

    The test bundle (and optional host app bundle) are passed to Tpx via env
    vars; the label distinguishes logic tests (no host app) from app tests.
    """

    # When interacting with Tpx, we just pass our various inputs via env vars,
    # since Tpx basically wants structured output for this.
    xctest_bundle = cmd_args(xctest_bundle).hidden(dsym_artifact) if dsym_artifact else xctest_bundle
    env = {"XCTEST_BUNDLE": xctest_bundle}

    if test_host_app_bundle == None:
        tpx_label = "tpx:apple_test:buck2:logicTest"
    else:
        env["HOST_APP_BUNDLE"] = test_host_app_bundle
        tpx_label = "tpx:apple_test:buck2:appTest"

    # BUGFIX: `tpx_label` was previously added twice (once via concatenation
    # and once via `append`), duplicating it in the labels list. Add it once.
    labels = ctx.attrs.labels + [tpx_label]

    return ExternalRunnerTestInfo(
        type = "custom",  # We inherit a label via the macro layer that overrides this.
        command = ["false"],  # Tpx makes up its own args, we just pass params via the env.
        env = flatten_x([ctx.attrs.env or {}, env]),
        labels = labels,
        use_project_relative_paths = True,
        run_from_project_root = True,
        contacts = ctx.attrs.contacts,
        executor_overrides = {
            "ios-simulator": CommandExecutorConfig(
                local_enabled = False,
                remote_enabled = True,
                remote_execution_properties = {
                    "platform": "ios-simulator-pure-re",
                    "subplatform": "iPhone 8.iOS 15.0",
                    "xcode-version": "xcodestable",
                },
                remote_execution_use_case = "buck2-default",
            ),
            "static-listing": CommandExecutorConfig(local_enabled = True, remote_enabled = False),
        },
    )
def _get_test_host_app_bundle(ctx: "context") -> ["artifact", None]:
    """ Get the bundle for the test host app, if one exists for this test. """
    test_host_app = ctx.attrs.test_host_app
    if not test_host_app:
        return None

    # Copy the test host app bundle into test's output directory
    original_bundle = test_host_app[AppleBundleInfo].bundle
    copied_bundle = ctx.actions.declare_output(original_bundle.basename)
    ctx.actions.copy_file(copied_bundle, original_bundle)
    return copied_bundle
def _get_test_host_app_binary(ctx: "context", test_host_app_bundle: ["artifact", None]) -> ["cmd_args", None]:
    """ Reference to the binary with the test host app bundle, if one exists for this test. Captures the bundle as an artifact in the cmd_args. """
    if not ctx.attrs.test_host_app:
        return None
    binary_name = ctx.attrs.test_host_app[AppleBundleInfo].binary_name
    return cmd_args([test_host_app_bundle, binary_name], delimiter = "/")
def _get_bundle_loader_flags(binary: ["cmd_args", None]) -> [""]:
    """Linker flags pointing the test bundle at its host app binary, or [] when there is no host app."""
    if binary == None:
        return []

    # During linking we need to link the test shared lib against the test host binary. The
    # test host binary doesn't need to be embedded in an `apple_bundle`.
    return ["-bundle_loader", binary]
def _xcode_populate_attributes(
        ctx,
        srcs: [CxxSrcWithFlags.type],
        argsfiles_by_ext: {str.type: "artifact"},
        xctest_bundle: "artifact",
        test_host_app_binary: ["cmd_args", None],
        **_kwargs) -> {str.type: ""}:
    """ Builds the Xcode attribute dict for this test: the shared apple attributes plus the xctest bundle output and, when present, the test host binary. """
    attributes = apple_populate_xcode_attributes(
        ctx = ctx,
        srcs = srcs,
        argsfiles_by_ext = argsfiles_by_ext,
        product_name = ctx.attrs.name,
    )
    attributes["output"] = xctest_bundle
    if test_host_app_binary:
        attributes["test_host_app_binary"] = test_host_app_binary
    return attributes
def _get_xctest_framework_search_paths(ctx: "context") -> ("cmd_args", "cmd_args"):
    """ Returns (swiftmodule search path, framework search path) for XCTest, rooted at the toolchain's platform directory. """
    platform_path = ctx.attrs._apple_toolchain[AppleToolchainInfo].platform_path
    swiftmodule_search_path = cmd_args([platform_path, "Developer/usr/lib"], delimiter = "/")
    framework_search_path = cmd_args([platform_path, "Developer/Library/Frameworks"], delimiter = "/")
    return (swiftmodule_search_path, framework_search_path)
def _get_xctest_framework_search_paths_flags(ctx: "context") -> [["cmd_args", str.type]]:
    """ Compiler flags adding the XCTest swiftmodule (-I) and framework (-F) search paths. """
    swiftmodule_path, framework_path = _get_xctest_framework_search_paths(ctx)
    return [
        "-I",
        swiftmodule_path,
        "-F",
        framework_path,
    ]
def _get_xctest_framework_linker_flags(ctx: "context") -> [["cmd_args", str.type]]:
    """ Linker flags adding the XCTest library (-L) and framework (-F) search paths. """
    swiftmodule_path, framework_path = _get_xctest_framework_search_paths(ctx)
    return [
        "-L",
        swiftmodule_path,
        "-F",
        framework_path,
    ]

View File

@ -0,0 +1,24 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load(":apple_bundle_config.bzl", "apple_bundle_config")
load(":apple_macro_layer.bzl", "apple_macro_layer_set_bool_override_attrs_from_config")
load(
":apple_rules_impl_utility.bzl",
"APPLE_LINK_LIBRARIES_LOCALLY_OVERRIDE_ATTR_NAME",
)
# Maps an override attribute name to a tuple — presumably the buckconfig
# (section, key) read by apple_macro_layer_set_bool_override_attrs_from_config;
# verify against that helper's implementation.
_APPLE_TEST_LOCAL_EXECUTION_OVERRIDES = {
    APPLE_LINK_LIBRARIES_LOCALLY_OVERRIDE_ATTR_NAME: ("apple", "link_libraries_locally_override"),
}
def apple_test_macro_impl(apple_test_rule = None, **kwargs):
    """ Macro layer for apple_test: merges in bundle config and config-driven bool override attrs, then invokes the underlying rule. """
    updated_kwargs = dict(kwargs)
    updated_kwargs.update(apple_bundle_config())
    updated_kwargs.update(apple_macro_layer_set_bool_override_attrs_from_config(_APPLE_TEST_LOCAL_EXECUTION_OVERRIDES))
    apple_test_rule(**updated_kwargs)

View File

@ -0,0 +1,44 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo")
load("@prelude//apple/swift:swift_toolchain_types.bzl", "SwiftToolchainInfo")
load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxPlatformInfo", "CxxToolchainInfo")
def apple_toolchain_impl(ctx: "context") -> ["provider"]:
    """
    Assembles AppleToolchainInfo from the rule's tool and SDK attributes.

    Internal (_internal_*) SDK/platform paths, when set, take precedence over
    the public sdk_path/platform_path attributes.
    """
    resolved_sdk_path = ctx.attrs._internal_sdk_path or ctx.attrs.sdk_path
    resolved_platform_path = ctx.attrs._internal_platform_path or ctx.attrs.platform_path
    toolchain_info = AppleToolchainInfo(
        actool = ctx.attrs.actool[RunInfo],
        codesign = ctx.attrs.codesign[RunInfo],
        codesign_allocate = ctx.attrs.codesign_allocate[RunInfo],
        # Optional tools resolve to None when the attribute is unset.
        codesign_identities_command = ctx.attrs.codesign_identities_command[RunInfo] if ctx.attrs.codesign_identities_command else None,
        compile_resources_locally = ctx.attrs.compile_resources_locally,
        cxx_platform_info = ctx.attrs.cxx_toolchain[CxxPlatformInfo],
        cxx_toolchain_info = ctx.attrs.cxx_toolchain[CxxToolchainInfo],
        dsymutil = ctx.attrs.dsymutil[RunInfo],
        dwarfdump = ctx.attrs.dwarfdump[RunInfo] if ctx.attrs.dwarfdump else None,
        ibtool = ctx.attrs.ibtool[RunInfo],
        installer = ctx.attrs.installer,
        libtool = ctx.attrs.libtool[RunInfo],
        lipo = ctx.attrs.lipo[RunInfo],
        min_version = ctx.attrs.min_version,
        momc = ctx.attrs.momc[RunInfo],
        platform_path = resolved_platform_path,
        sdk_build_version = ctx.attrs.build_version,
        sdk_name = ctx.attrs.sdk_name,
        sdk_path = resolved_sdk_path,
        sdk_version = ctx.attrs.version,
        swift_toolchain_info = ctx.attrs.swift_toolchain[SwiftToolchainInfo] if ctx.attrs.swift_toolchain else None,
        watch_kit_stub_binary = ctx.attrs.watch_kit_stub_binary,
        xcode_build_version = ctx.attrs.xcode_build_version,
        xcode_version = ctx.attrs.xcode_version,
        xctest = ctx.attrs.xctest[RunInfo],
    )
    return [DefaultInfo(), toolchain_info]

View File

@ -0,0 +1,46 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
# Describes an Apple toolchain: the asset/codesign/build tools plus SDK and
# Xcode metadata consumed by the apple_* rules.
AppleToolchainInfo = provider(fields = [
    "actool",  # "RunInfo"
    "codesign_allocate",  # "RunInfo"
    "codesign_identities_command",  # ["RunInfo", None]
    "codesign",  # "RunInfo"
    "compile_resources_locally",  # bool.type
    "cxx_platform_info",  # "CxxPlatformInfo"
    "cxx_toolchain_info",  # "CxxToolchainInfo"
    "dsymutil",  # "RunInfo"
    "dwarfdump",  # ["RunInfo", None]
    "ibtool",  # "RunInfo"
    "installer",  # label
    "libtool",  # "RunInfo"
    "lipo",  # "RunInfo"
    "min_version",  # [None, str.type]
    "momc",  # "RunInfo"
    "platform_path",  # [str.type, artifact]
    "sdk_build_version",  # [None, str.type]
    # SDK name to be passed to tools (e.g. actool), equivalent to ApplePlatform::getExternalName() in v1.
    "sdk_name",  # str.type
    "sdk_path",  # [str.type, artifact]
    # TODO(T124581557) Make it non-optional once there is no "selected xcode" toolchain
    "sdk_version",  # [None, str.type]
    "swift_toolchain_info",  # ["SwiftToolchainInfo", None] — None when the toolchain rule has no swift_toolchain
    "watch_kit_stub_binary",  # "artifact"
    "xcode_build_version",  # [None, str.type]
    "xcode_version",  # [None, str.type]
    "xctest",  # "RunInfo"
])
# Helper tools used by the Apple bundling/codesigning machinery.
AppleToolsInfo = provider(fields = [
    "assemble_bundle",  # "RunInfo"
    "dry_codesign_tool",  # "RunInfo"
    "selective_debugging_scrubber",  # "RunInfo"
    "info_plist_processor",  # "RunInfo"
    "make_modulemap",  # "RunInfo"
    "make_vfsoverlay",  # "RunInfo"
    "swift_objc_header_postprocess",  # "RunInfo"
])

View File

@ -0,0 +1,87 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo")
load("@prelude//cxx:headers.bzl", "CxxHeadersLayout", "CxxHeadersNaming")
load("@prelude//utils:utils.bzl", "value_or")
load(":apple_target_sdk_version.bzl", "get_min_deployment_version_for_node")
# Placeholder substituted with the target SDK version when building a triple.
_VERSION_PLACEHOLDER = "(VERSION)"
# TODO(T115177501): Make target triples part of the toolchains
# Map from SDK name -> target triple _without_ leading architecture
_TARGET_TRIPLE_MAP = {
    "iphoneos": "apple-ios{}".format(_VERSION_PLACEHOLDER),
    "iphonesimulator": "apple-ios{}-simulator".format(_VERSION_PLACEHOLDER),
    "macosx": "apple-macosx{}".format(_VERSION_PLACEHOLDER),
    "watchos": "apple-watchos{}".format(_VERSION_PLACEHOLDER),
    "watchsimulator": "apple-watchos{}-simulator".format(_VERSION_PLACEHOLDER),
}
def get_explicit_modules_env_var(uses_explicit_modules: bool.type) -> dict.type:
    """ Env var marking explicit modules as enabled; empty dict when they are not. """
    if uses_explicit_modules:
        return {"EXPLICIT_MODULES_ENABLED": "TRUE"}
    return {}
def get_apple_cxx_headers_layout(ctx: "context") -> CxxHeadersLayout.type:
    """ Header layout with apple naming, namespaced by header_path_prefix falling back to the target name. """
    return CxxHeadersLayout(
        namespace = value_or(ctx.attrs.header_path_prefix, ctx.attrs.name),
        naming = CxxHeadersNaming("apple"),
    )
def get_module_name(ctx: "context") -> str.type:
    """ Module name for the target: explicit module_name, else header_path_prefix, else the target name. """
    for candidate in [ctx.attrs.module_name, ctx.attrs.header_path_prefix]:
        if candidate:
            return candidate
    return ctx.attrs.name
def has_apple_toolchain(ctx: "context") -> bool.type:
    """ True when the target's rule declares the _apple_toolchain attribute. """
    return hasattr(ctx.attrs, "_apple_toolchain")
def get_versioned_target_triple(ctx: "context") -> str.type:
    """
    Builds the full target triple (e.g. "arm64-apple-ios15.0") from the Swift
    toolchain's architecture, the SDK name and the minimum deployment version.

    Fails when the toolchain declares no architecture or the SDK is not in
    _TARGET_TRIPLE_MAP.
    """
    toolchain = ctx.attrs._apple_toolchain[AppleToolchainInfo]
    arch = toolchain.swift_toolchain_info.architecture
    if arch == None:
        fail("Need to set `architecture` field of swift_toolchain(), target: {}".format(ctx.label))

    triple_template = _TARGET_TRIPLE_MAP.get(toolchain.sdk_name)
    if triple_template == None:
        fail("Could not find target triple for sdk = {}".format(toolchain.sdk_name))

    # A missing deployment version leaves the placeholder replaced with "".
    version = get_min_deployment_version_for_node(ctx) or ""
    return "{}-{}".format(arch, triple_template.replace(_VERSION_PLACEHOLDER, version))
def expand_relative_prefixed_sdk_path(
        sdk_path: "cmd_args",
        swift_resource_dir: "cmd_args",
        platform_path: "cmd_args",
        path_to_expand: str.type) -> "cmd_args":
    """ Expands a leading $SDKROOT / $RESOURCEDIR / $PLATFORM_DIR prefix of path_to_expand into the corresponding cmd_args value. """
    expansions = {
        "$PLATFORM_DIR": platform_path,
        "$RESOURCEDIR": swift_resource_dir,
        "$SDKROOT": sdk_path,
    }
    expanded = cmd_args()
    for (prefix, replacement) in expansions.items():
        if not path_to_expand.startswith(prefix):
            continue
        remainder = path_to_expand[len(prefix):]

        # A second '$' immediately after the prefix would be a nested variable,
        # which is unsupported.
        if remainder.startswith("$"):
            fail("Failed to expand framework path: {}".format(remainder))
        expanded.add(cmd_args([replacement, remainder], delimiter = ""))

    # NOTE(review): a path with no recognized prefix yields empty cmd_args
    # rather than failing — confirm callers rely on that.
    return expanded
def get_disable_pch_validation_flags() -> [str.type]:
    """
    Clang flags (forwarded via -Xcc) that disable PCH validation.

    Needed for actions like Swift compilation and Swift PCM generation: there is
    currently no mechanism to compile with PCH validation enabled together with
    Swift explicit modules, pending anonymous targets support.
    """
    return ["-Xcc", "-Xclang", "-Xcc", "-fno-validate-pch"]

View File

@ -0,0 +1,88 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolsInfo")
load(
"@prelude//cxx:headers.bzl",
"CHeader", # @unused Used as a type
)
load(
"@prelude//cxx:preprocessor.bzl",
"CPreprocessor",
)
load(":apple_utility.bzl", "get_module_name")
def preprocessor_info_for_modulemap(ctx: "context", name: str.type, headers: [CHeader.type], swift_header: ["artifact", None]) -> "CPreprocessor":
    """
    Generates a modulemap (via the make_modulemap tool) over a symlink tree of
    the given headers and returns a CPreprocessor exposing it: modular args for
    this library, the modulemap path, and -I args for rdeps.
    """

    # We don't want to name this module.modulemap to avoid implicit importing
    if name == "module":
        fail("Don't use the name `module` for modulemaps, this will allow for implicit importing.")
    module_name = get_module_name(ctx)

    # Create a map of header import path to artifact location
    header_map = {}
    for h in headers:
        if h.namespace:
            header_map["{}/{}".format(h.namespace, h.name)] = h.artifact
        else:
            header_map[h.name] = h.artifact

    # We need to include the Swift header in the symlink tree too
    swift_header_name = "{}/{}-Swift.h".format(module_name, module_name)
    if swift_header:
        header_map[swift_header_name] = swift_header

    # Create a symlink dir for the headers to import
    symlink_tree = ctx.actions.symlinked_dir(name + "_symlink_tree", header_map)

    # Create a modulemap at the root of that tree
    output = ctx.actions.declare_output(name + ".modulemap")
    cmd = cmd_args(ctx.attrs._apple_tools[AppleToolsInfo].make_modulemap)
    cmd.add([
        "--output",
        output.as_output(),
        "--name",
        get_module_name(ctx),
        "--symlink-tree",
        symlink_tree,
    ])
    if swift_header:
        cmd.add([
            "--swift-header",
            swift_header,
        ])
    if ctx.attrs.use_submodules:
        cmd.add("--use-submodules")

    # Header mappings are passed sorted for deterministic tool input.
    for hdr in sorted(header_map.keys()):
        # Don't include the Swift header in the mappings, this is handled separately.
        if hdr != swift_header_name:
            cmd.add(hdr)
    ctx.actions.run(cmd, category = "modulemap", identifier = name)
    return CPreprocessor(
        modular_args = _args_for_modulemap(output, symlink_tree, swift_header),
        modulemap_path = cmd_args(output).hidden(cmd_args(symlink_tree)),
        args = _exported_preprocessor_args(symlink_tree),
    )
def _args_for_modulemap(
        modulemap: "artifact",
        symlink_tree: "artifact",
        swift_header: ["artifact", None]) -> ["cmd_args"]:
    """ The -fmodule-map-file flag for the modulemap, carrying the symlink tree (and Swift header, if any) as hidden inputs. """
    flag = cmd_args(modulemap, format = "-fmodule-map-file={}")
    hidden_inputs = [symlink_tree]
    if swift_header:
        hidden_inputs.append(swift_header)
    flag.hidden(hidden_inputs)
    return [flag]
def _exported_preprocessor_args(symlink_tree: "artifact") -> ["cmd_args"]:
    """ Exported -I search-path flag so rdeps resolve headers through the symlink tree. """
    return [cmd_args(symlink_tree, format = "-I{}")]

View File

@ -0,0 +1,110 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load(
"@prelude//cxx:cxx_library_utility.bzl",
"cxx_attr_exported_linker_flags",
"cxx_platform_supported",
)
load(
"@prelude//cxx:preprocessor.bzl",
"CPreprocessor",
"cxx_inherited_preprocessor_infos",
"cxx_merge_cpreprocessors",
)
load(
"@prelude//linking:link_groups.bzl",
"merge_link_group_lib_info",
)
load(
"@prelude//linking:link_info.bzl",
"LinkInfo",
"LinkInfos",
"LinkStyle",
"Linkage",
"create_merged_link_info",
)
load(
"@prelude//linking:linkable_graph.bzl",
"create_linkable_graph",
"create_linkable_graph_node",
"create_linkable_node",
)
load(
"@prelude//linking:shared_libraries.bzl",
"SharedLibraryInfo",
"merge_shared_libraries",
)
load("@prelude//utils:utils.bzl", "filter_and_map_idx")
load(":apple_bundle_types.bzl", "AppleBundleInfo")
load(":apple_frameworks.bzl", "to_framework_name")
def prebuilt_apple_framework_impl(ctx: "context") -> ["provider"]:
    """
    Rule implementation for a prebuilt Apple framework: exposes preprocessor
    flags, link info and a linkable-graph node for the framework when the
    current platform is supported, and always provides the framework directory
    as the default output plus an AppleBundleInfo.
    """
    providers = []

    framework_directory_artifact = ctx.attrs.framework

    # Check this rule's `supported_platforms_regex` with the current platform.
    if cxx_platform_supported(ctx):
        # Sandbox the framework, to avoid leaking other frameworks via search paths.
        framework_name = to_framework_name(framework_directory_artifact.basename)
        framework_dir = ctx.actions.symlinked_dir(
            "Frameworks",
            {framework_name + ".framework": framework_directory_artifact},
        )

        # Add framework & pp info from deps.
        inherited_pp_info = cxx_inherited_preprocessor_infos(ctx.attrs.deps)
        providers.append(cxx_merge_cpreprocessors(
            ctx,
            [CPreprocessor(args = ["-F", framework_dir])],
            inherited_pp_info,
        ))

        # Add framework to link args.
        # TODO(T110378120): Support shared linking for mac targets:
        # https://fburl.com/code/pqrtt1qr.
        args = []
        args.extend(cxx_attr_exported_linker_flags(ctx))
        args.extend(["-F", framework_dir])
        args.extend(["-framework", framework_name])
        link = LinkInfo(
            name = framework_name,
            pre_flags = args,
        )
        # The same LinkInfo is used for every link style.
        providers.append(create_merged_link_info(
            ctx,
            {link_style: LinkInfos(default = link) for link_style in LinkStyle},
        ))

        # Create, augment and provide the linkable graph.
        linkable_graph = create_linkable_graph(
            ctx,
            node = create_linkable_graph_node(
                ctx,
                linkable_node = create_linkable_node(
                    ctx,
                    preferred_linkage = Linkage("shared"),
                    link_infos = {LinkStyle("shared"): LinkInfos(default = link)},
                ),
                # This target is excluded from link groups.
                excluded = {ctx.label: None},
            ),
        )
        providers.append(linkable_graph)

    # The default output is the provided framework.
    providers.append(DefaultInfo(default_output = framework_directory_artifact))
    providers.append(AppleBundleInfo(
        bundle = framework_directory_artifact,
        is_watchos = None,
        skip_copying_swift_stdlib = True,
        contains_watchapp = None,
    ))
    providers.append(merge_link_group_lib_info(deps = ctx.attrs.deps))
    providers.append(merge_shared_libraries(ctx.actions, deps = filter_and_map_idx(SharedLibraryInfo, ctx.attrs.deps)))

    return providers

View File

@ -0,0 +1,148 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load(
"@prelude//cxx:groups.bzl",
"MATCH_ALL_LABEL",
)
load(
"@prelude//utils:graph_utils.bzl",
"breadth_first_traversal_by",
)
load(":apple_asset_catalog_types.bzl", "AppleAssetCatalogSpec")
load(":apple_core_data_types.bzl", "AppleCoreDataSpec")
load(":apple_resource_types.bzl", "AppleResourceSpec")
# Resource group definitions plus the label -> group-name assignment they imply.
ResourceGroupInfo = provider(fields = [
    "groups",  # [Group.type]
    "groups_hash",  # str.type
    "mappings",  # {"label": str.type}
])
# One node of the resource graph; the spec fields are populated only for the
# target kinds named in their comments.
ResourceGraphNode = record(
    label = field("label"),
    # Attribute labels on the target.
    labels = field([str.type], []),
    # Deps of this target which might have resources transitively.
    deps = field(["label"], []),
    # Exported deps of this target which might have resources transitively.
    exported_deps = field(["label"], []),
    # Actual resource data, present when node corresponds to `apple_resource` target.
    resource_spec = field([AppleResourceSpec.type, None], None),
    # Actual asset catalog data, present when node corresponds to `apple_asset_catalog` target.
    asset_catalog_spec = field([AppleAssetCatalogSpec.type, None], None),
    # Actual core data, present when node corresponds to `core_data_model` target
    core_data_spec = field([AppleCoreDataSpec.type, None], None),
)
# Transitive set of ResourceGraphNode values.
ResourceGraphTSet = transitive_set()
# The resource graph rooted at `label`, with all transitive nodes in `nodes`.
ResourceGraph = provider(fields = [
    "label",  # "label"
    "nodes",  # "ResourceGraphTSet"
])
def create_resource_graph(
        ctx: "context",
        labels: [str.type],
        deps: ["dependency"],
        exported_deps: ["dependency"],
        resource_spec: [AppleResourceSpec.type, None] = None,
        asset_catalog_spec: [AppleAssetCatalogSpec.type, None] = None,
        core_data_spec: [AppleCoreDataSpec.type, None] = None) -> ResourceGraph.type:
    """ Builds this target's ResourceGraph: a node for the target itself merged with the graphs of all (exported) deps that carry resources. """
    self_node = ResourceGraphNode(
        label = ctx.label,
        labels = labels,
        deps = _with_resources_deps(deps),
        exported_deps = _with_resources_deps(exported_deps),
        resource_spec = resource_spec,
        asset_catalog_spec = asset_catalog_spec,
        core_data_spec = core_data_spec,
    )
    child_graphs = filter(None, [d.get(ResourceGraph) for d in deps + exported_deps])
    return ResourceGraph(
        label = ctx.label,
        nodes = ctx.actions.tset(
            ResourceGraphTSet,
            value = self_node,
            children = [g.nodes for g in child_graphs],
        ),
    )
def get_resource_graph_node_map_func(graph: ResourceGraph.type):
    """ Returns a thunk that materializes {label: ResourceGraphNode} from the graph's transitive set. """

    def get_resource_graph_node_map() -> {"label": ResourceGraphNode.type}:
        node_map = {}
        for node in graph.nodes.traverse():
            if node:
                node_map[node.label] = node
        return node_map

    return get_resource_graph_node_map
def _with_resources_deps(deps: ["dependency"]) -> ["label"]:
    """
    Filters dependencies down to those relevant to resource handling, i.e.
    those providing ResourceGraph, and returns their labels.
    """
    labels = []
    for dep in deps:
        graph = dep.get(ResourceGraph)
        if graph != None:
            labels.append(graph.label)
    return labels
def get_resource_group_info(ctx: "context") -> [ResourceGroupInfo.type, None]:
    """
    Returns the ResourceGroupInfo from the target's resource_group_map
    attribute, or None when no map is set. The map must be provided as a
    resource_group_map rule dependency.
    """
    mapping_target = ctx.attrs.resource_group_map
    if not mapping_target:
        return None
    if type(mapping_target) != "dependency":
        fail("Resource group maps must be provided as a resource_group_map rule dependency.")
    return mapping_target[ResourceGroupInfo]
def get_filtered_resources(
        root: "label",
        resource_graph_node_map_func,
        resource_group: [str.type, None],
        resource_group_mappings: [{"label": str.type}, None]) -> ([AppleResourceSpec.type], [AppleAssetCatalogSpec.type], [AppleCoreDataSpec.type]):
    """
    Walks the resource graph reachable from `root` (excluding the root itself)
    and collects the resource, asset catalog and core data specs of targets
    that belong to `resource_group` per `resource_group_mappings`.
    """
    # NOTE(review): `resource_group_mappings` is typed optional but is
    # dereferenced unconditionally below — confirm callers never pass None.
    resource_graph_node_map = resource_graph_node_map_func()

    def get_traversed_deps(target: "label") -> ["label"]:
        node = resource_graph_node_map[target]  # buildifier: disable=uninitialized
        return node.exported_deps + node.deps

    # Traversal starts from the root's deps, so the root's own specs are not collected.
    targets = breadth_first_traversal_by(
        resource_graph_node_map,
        get_traversed_deps(root),
        get_traversed_deps,
    )

    resource_specs = []
    asset_catalog_specs = []
    core_data_specs = []

    for target in targets:
        target_resource_group = resource_group_mappings.get(target)

        # Ungrouped targets belong to the unlabeled bundle
        if ((not target_resource_group and not resource_group) or
            # Does it match special "MATCH_ALL" mapping?
            target_resource_group == MATCH_ALL_LABEL or
            # Does it match currently evaluated group?
            target_resource_group == resource_group):
            node = resource_graph_node_map[target]
            resource_spec = node.resource_spec
            if resource_spec:
                resource_specs.append(resource_spec)
            asset_catalog_spec = node.asset_catalog_spec
            if asset_catalog_spec:
                asset_catalog_specs.append(asset_catalog_spec)
            core_data_spec = node.core_data_spec
            if core_data_spec:
                core_data_specs.append(core_data_spec)

    return resource_specs, asset_catalog_specs, core_data_specs

View File

@ -0,0 +1,42 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load(":swift_sdk_pcm_compilation.bzl", "get_shared_pcm_compilation_args")
load(":swift_toolchain_types.bzl", "SdkSwiftOverlayInfo", "SdkUncompiledModuleInfo")
def apple_sdk_clang_module_impl(ctx: "context") -> ["provider"]:
    """ Provides an uncompiled SDK Clang module node (plus optional Swift overlay info) for the SDK module dependency graph. """
    partial_cmd = get_shared_pcm_compilation_args(ctx.attrs.target, ctx.attrs.module_name)
    providers = [
        DefaultInfo(),
        SdkUncompiledModuleInfo(
            name = ctx.attrs.name,
            module_name = ctx.attrs.module_name,
            is_framework = ctx.attrs.is_framework,
            is_swiftmodule = False,
            partial_cmd = partial_cmd,
            input_relative_path = ctx.attrs.modulemap_relative_path,
            deps = ctx.attrs.deps,
        ),
    ]
    if ctx.attrs.overlays:
        providers.append(SdkSwiftOverlayInfo(overlays = ctx.attrs.overlays))
    return providers
# This rule represent a Clang module from SDK and forms a graph of dependencies between such modules.
apple_sdk_clang_module = rule(
impl = apple_sdk_clang_module_impl,
attrs = {
"deps": attrs.list(attrs.dep(), default = []),
"is_framework": attrs.bool(default = False),
# This is a real module name, contrary to `name`
# which has a special suffix to distinguish Swift and Clang modules with the same name
"module_name": attrs.string(),
"modulemap_relative_path": attrs.string(),
"overlays": attrs.dict(key = attrs.string(), value = attrs.list(attrs.string(), default = []), sorted = False, default = {}),
"target": attrs.string(),
},
)

View File

@ -0,0 +1,83 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//utils:set.bzl", "set")
load(":swift_toolchain_types.bzl", "SdkSwiftOverlayInfo", "WrappedSdkCompiledModuleInfo")
def project_as_hidden(module_info: "SdkCompiledModuleInfo"):
    """ Projects a compiled SDK module's output as a hidden cmd_args input. """
    # NOTE(cjhopman): This would probably be better done by projecting as normal
    # args and the caller putting it in hidden.
    hidden_args = cmd_args()
    hidden_args.hidden(module_info.output_artifact)
    return hidden_args
def project_as_clang_deps(module_info: "SdkCompiledModuleInfo"):
    """ Projects a compiled SDK Clang module as -Xcc explicit-module flags; Swift modules project to nothing. """
    if module_info.is_swiftmodule:
        return []
    module_file_flag = cmd_args(["-fmodule-file=", module_info.module_name, "=", module_info.output_artifact], delimiter = "")
    module_map_flag = cmd_args(["-fmodule-map-file=", module_info.input_relative_path], delimiter = "")
    return ["-Xcc", module_file_flag, "-Xcc", module_map_flag]
# Transitive set of compiled SDK modules with projections for explicit Clang
# module flags and for hidden action inputs.
SDKDepTSet = transitive_set(args_projections = {
    "clang_deps": project_as_clang_deps,
    "hidden": project_as_hidden,
})
def is_sdk_modules_provided(toolchain: "SwiftToolchainInfo") -> bool.type:
    """ True when the toolchain declares at least one uncompiled SDK module dep (Swift or Clang). """
    has_swift_modules = bool(toolchain.uncompiled_swift_sdk_modules_deps)
    has_clang_modules = bool(toolchain.uncompiled_clang_sdk_modules_deps)
    return has_swift_modules or has_clang_modules
def get_compiled_sdk_deps_tset(ctx: "context", deps_providers: list.type) -> "SDKDepTSet":
    """ Merges the WrappedSdkCompiledModuleInfo tsets of the given provider sets into a single SDKDepTSet. """
    children = []
    for provider_set in deps_providers:
        if WrappedSdkCompiledModuleInfo in provider_set:
            children.append(provider_set[WrappedSdkCompiledModuleInfo].tset)
    return ctx.actions.tset(SDKDepTSet, children = children)
def get_uncompiled_sdk_deps(
        sdk_modules: [str.type],
        required_modules: [str.type],
        toolchain: "SwiftToolchainInfo") -> ["dependency"]:
    """
    Resolves the requested + required SDK module names to their uncompiled
    module dependencies from the toolchain's Swift/Clang SDK module maps, and
    appends any applicable cross-import Swift overlays.

    Fails when the toolchain declares no SDK modules at all. Names not present
    in either map are silently skipped.
    """
    if not is_sdk_modules_provided(toolchain):
        fail("SDK deps are not set for swift_toolchain")

    all_sdk_modules = sdk_modules + required_modules
    all_sdk_modules = set(all_sdk_modules)

    sdk_deps = []
    sdk_overlays = []

    # Looks up `dep_name` in the given map, collecting the dep and any of its
    # cross-import overlays into the enclosing lists.
    def process_sdk_module_dep(dep_name, uncompiled_sdk_modules_map):
        if dep_name not in uncompiled_sdk_modules_map:
            return

        sdk_dep = uncompiled_sdk_modules_map[dep_name]
        sdk_deps.append(sdk_dep)

        if SdkSwiftOverlayInfo not in sdk_dep:
            return

        overlay_info = sdk_dep[SdkSwiftOverlayInfo]
        for underlying_module, overlay_modules in overlay_info.overlays.items():
            # Only add a cross import SDK overlay if both modules associated with the overlay are required
            if all_sdk_modules.contains(underlying_module):
                # Cross import overlays themselves are always Swift modules, but the underlying module
                # can be a Swift module or a Clang module
                sdk_overlays.extend([toolchain.uncompiled_swift_sdk_modules_deps[overlay_name] for overlay_name in overlay_modules if overlay_name in toolchain.uncompiled_swift_sdk_modules_deps])

    for sdk_module_dep_name in all_sdk_modules.list():
        process_sdk_module_dep(sdk_module_dep_name, toolchain.uncompiled_swift_sdk_modules_deps)
        process_sdk_module_dep(sdk_module_dep_name, toolchain.uncompiled_clang_sdk_modules_deps)

    return sdk_deps + sdk_overlays

View File

@ -0,0 +1,68 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//apple:apple_utility.bzl", "get_disable_pch_validation_flags")
load(":swift_toolchain_types.bzl", "SdkSwiftOverlayInfo", "SdkUncompiledModuleInfo")
def apple_sdk_swift_module_impl(ctx: "context") -> ["provider"]:
    """ Provides an uncompiled SDK Swift module node (plus optional overlay info) for the SDK module dependency graph. """
    module_name = ctx.attrs.module_name

    partial_cmd = cmd_args([
        "-frontend",
        "-compile-module-from-interface",
        "-disable-implicit-swift-modules",
        "-serialize-parseable-module-interface-dependency-hashes",
        "-disable-modules-validate-system-headers",
        "-suppress-warnings",
        "-module-name",
        module_name,
        "-target",
        ctx.attrs.target,
        "-Xcc",
        "-fno-implicit-modules",
        "-Xcc",
        "-fno-implicit-module-maps",
    ])
    partial_cmd.add(get_disable_pch_validation_flags())

    # The core standard-library modules are compiled with -parse-stdlib.
    if module_name in ["Swift", "SwiftOnoneSupport"]:
        partial_cmd.add([
            "-parse-stdlib",
        ])

    providers = [
        DefaultInfo(),
        SdkUncompiledModuleInfo(
            name = ctx.attrs.name,
            module_name = module_name,
            is_framework = ctx.attrs.is_framework,
            is_swiftmodule = True,
            partial_cmd = partial_cmd,
            input_relative_path = ctx.attrs.swiftinterface_relative_path,
            deps = ctx.attrs.deps,
        ),
    ]
    if ctx.attrs.overlays:
        providers.append(SdkSwiftOverlayInfo(overlays = ctx.attrs.overlays))
    return providers
# This rule represent a Swift module from SDK and forms a graph of dependencies between such modules.
apple_sdk_swift_module = rule(
impl = apple_sdk_swift_module_impl,
attrs = {
"deps": attrs.list(attrs.dep(), default = []),
"is_framework": attrs.bool(default = False),
# This is a real module name, contrary to `name`
# which has a special suffix to distinguish Swift and Clang modules with the same name
"module_name": attrs.string(),
"overlays": attrs.dict(key = attrs.string(), value = attrs.list(attrs.string(), default = []), sorted = False, default = {}),
# A prefixed path ($SDKROOT/$PLATFORM_DIR) to swiftinterface textual file.
"swiftinterface_relative_path": attrs.option(attrs.string(), default = None), # if `swiftinterface` is None represents a Root node.
"target": attrs.string(),
},
)

View File

@ -0,0 +1,605 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//:paths.bzl", "paths")
load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo", "AppleToolsInfo")
load("@prelude//apple:apple_utility.bzl", "get_disable_pch_validation_flags", "get_explicit_modules_env_var", "get_module_name", "get_versioned_target_triple")
load("@prelude//apple:modulemap.bzl", "preprocessor_info_for_modulemap")
load(
"@prelude//cxx:compile.bzl",
"CxxSrcWithFlags", # @unused Used as a type
)
load("@prelude//cxx:cxx_types.bzl", "CxxAdditionalArgsfileParams")
load(
"@prelude//cxx:debug.bzl",
"ExternalDebugInfoTSet", # @unused Used as a type
"maybe_external_debug_info",
)
load("@prelude//cxx:headers.bzl", "CHeader")
load(
"@prelude//cxx:preprocessor.bzl",
"CPreprocessor",
"cxx_inherited_preprocessor_infos",
"cxx_merge_cpreprocessors",
)
load(":apple_sdk_modules_utility.bzl", "get_compiled_sdk_deps_tset", "get_uncompiled_sdk_deps", "is_sdk_modules_provided")
load(":swift_module_map.bzl", "write_swift_module_map_with_swift_deps")
load(":swift_pcm_compilation.bzl", "PcmDepTSet", "compile_underlying_pcm", "get_compiled_pcm_deps_tset", "get_swift_pcm_anon_targets")
load(":swift_pcm_compilation_types.bzl", "SwiftPCMUncompiledInfo")
load(":swift_sdk_pcm_compilation.bzl", "get_swift_sdk_pcm_anon_targets")
load(":swift_sdk_swiftinterface_compilation.bzl", "get_swift_interface_anon_targets")
def _add_swiftmodule_search_path(swiftmodule_path: "artifact"):
    """ Projects a swiftmodule artifact as a -I flag pointing at its containing directory. """
    # Value will contain a path to the artifact,
    # while we need only the folder which contains the artifact.
    return ["-I", cmd_args(swiftmodule_path).parent()]
def _hidden_projection(swiftmodule_path: "artifact"):
    """ Projects a swiftmodule as a hidden input (no flags). """
    return swiftmodule_path
def _linker_args_projection(swiftmodule_path: "artifact"):
    """ Projects a swiftmodule as a -Wl,-add_ast_path linker flag carrying its path. """
    return cmd_args(swiftmodule_path, format = "-Wl,-add_ast_path,{}")
# Transitive set of swiftmodule artifacts with projections for hidden inputs,
# linker args and module search paths.
SwiftmodulePathsTSet = transitive_set(args_projections = {
    "hidden": _hidden_projection,
    "linker_args": _linker_args_projection,
    "module_search_path": _add_swiftmodule_search_path,
})
# Transitive set of exported headers, keyed by module name.
ExportedHeadersTSet = transitive_set()
SwiftDependencyInfo = provider(fields = [
    "exported_headers",  # ExportedHeadersTSet of {"module_name": [exported_headers]}
    "exported_swiftmodule_paths",  # SwiftmodulePathsTSet of artifact that includes only paths through exported_deps, used for compilation
    "transitive_swiftmodule_paths",  # SwiftmodulePathsTSet of artifact that includes all transitive paths, used for linking
    "external_debug_info",
])
# Everything produced by compiling the Swift half of a library.
SwiftCompilationOutput = record(
    # The object file output from compilation.
    object_file = field("artifact"),
    # The swiftmodule file output from compilation.
    swiftmodule = field("artifact"),
    # The dependency info provider that provides the swiftmodule
    # search paths required for compilation.
    providers = field(["SwiftDependencyInfo"]),
    # Preprocessor info required for ObjC compilation of this library.
    pre = field(CPreprocessor.type),
    # Exported preprocessor info required for ObjC compilation of rdeps.
    exported_pre = field(CPreprocessor.type),
    # Argsfile to compile an object file which is used by some subtargets.
    swift_argsfile = field("CxxAdditionalArgsfileParams"),
)
# SDK modules that every Swift compilation depends on.
REQUIRED_SDK_MODULES = ["Swift", "SwiftOnoneSupport", "Darwin", "_Concurrency", "_StringProcessing"]
def get_swift_anonymous_targets(ctx: "context", get_apple_library_providers: "function") -> "promise":
    """Spawns the anon targets that compile this library's module dependencies.

    Covers (1) PCMs of direct/exported user deps, (2) Clang PCMs of required
    SDK modules, and (3) swiftinterface compilations of SDK Swift modules,
    then maps the resolved providers through `get_apple_library_providers`.
    """
    swift_cxx_flags = get_swift_cxx_flags(ctx)
    # Get SDK deps from direct dependencies,
    # all transitive deps will be compiled recursively.
    direct_uncompiled_sdk_deps = get_uncompiled_sdk_deps(
        ctx.attrs.sdk_modules,
        REQUIRED_SDK_MODULES,
        ctx.attrs._apple_toolchain[AppleToolchainInfo].swift_toolchain_info,
    )
    # Recursively compiling headers of direct and transitive deps as PCM modules,
    # passing apple_library's cxx flags through that must be used for all downward PCM compilations.
    pcm_targets = get_swift_pcm_anon_targets(
        ctx,
        ctx.attrs.deps + ctx.attrs.exported_deps,
        swift_cxx_flags,
    )
    # Recursively compiling SDK's Clang dependencies,
    # passing apple_library's cxx flags through that must be used for all downward PCM compilations.
    sdk_pcm_targets = get_swift_sdk_pcm_anon_targets(
        ctx,
        direct_uncompiled_sdk_deps,
        swift_cxx_flags,
    )
    # Recursively compiling SDK's Swift dependencies,
    # passing apple_library's cxx flags through that must be used for all downward PCM compilations.
    swift_interface_anon_targets = get_swift_interface_anon_targets(
        ctx,
        direct_uncompiled_sdk_deps,
        swift_cxx_flags,
    )
    return ctx.actions.anon_targets(pcm_targets + sdk_pcm_targets + swift_interface_anon_targets).map(get_apple_library_providers)
def get_swift_cxx_flags(ctx: "context") -> [str.type]:
    """Extracts the subset of `swift_compiler_flags` that may affect Clang.

    Any flag immediately following a "-Xcc" marker is forwarded with the
    marker re-added; interop and language-version settings are appended last.
    """
    clang_flags = []
    previous_was_xcc = False
    for flag in ctx.attrs.swift_compiler_flags:
        flag_str = str(flag)
        if previous_was_xcc:
            clang_flags.append("-Xcc")
            clang_flags.append(flag_str.replace('\"', ""))
        previous_was_xcc = flag_str == "\"-Xcc\""
    if ctx.attrs.enable_cxx_interop:
        clang_flags.extend(["-Xfrontend", "-enable-cxx-interop"])
    if ctx.attrs.swift_version != None:
        clang_flags.extend(["-swift-version", ctx.attrs.swift_version])
    return clang_flags
def compile_swift(
        ctx: "context",
        srcs: [CxxSrcWithFlags.type],
        deps_providers: list.type,
        exported_headers: [CHeader.type],
        objc_modulemap_pp_info: ["CPreprocessor", None],
        extra_search_paths_flags: ["_arglike"] = []) -> ["SwiftCompilationOutput", None]:
    """Compiles a library's Swift sources into an object file and swiftmodule.

    Returns None when there are no Swift sources; otherwise returns a
    SwiftCompilationOutput bundling both outputs plus the preprocessor info
    needed by ObjC consumers of the generated -Swift.h header.
    """
    if not srcs:
        return None
    # If a target exports ObjC headers and Swift explicit modules are enabled,
    # we need to precompile a PCM of the underlying module and supply it to the Swift compilation.
    if objc_modulemap_pp_info and ctx.attrs.uses_explicit_modules:
        underlying_swift_pcm_uncompiled_info = get_swift_pcm_uncompile_info(
            ctx,
            None,
            objc_modulemap_pp_info,
        )
        if underlying_swift_pcm_uncompiled_info:
            compiled_underlying_pcm = compile_underlying_pcm(
                ctx,
                underlying_swift_pcm_uncompiled_info,
                deps_providers,
                get_swift_cxx_flags(ctx),
            )
        else:
            compiled_underlying_pcm = None
    else:
        compiled_underlying_pcm = None
    toolchain = ctx.attrs._apple_toolchain[AppleToolchainInfo].swift_toolchain_info
    module_name = get_module_name(ctx)
    output_header = ctx.actions.declare_output(module_name + "-Swift.h")
    output_object = ctx.actions.declare_output(module_name + ".o")
    output_swiftmodule = ctx.actions.declare_output(module_name + ".swiftmodule")
    shared_flags = _get_shared_flags(
        ctx,
        deps_providers,
        compiled_underlying_pcm,
        module_name,
        exported_headers,
        objc_modulemap_pp_info,
        extra_search_paths_flags,
    )
    # Toolchains that can emit the ObjC header textually produce it directly;
    # otherwise an unprocessed header is emitted and then postprocessed.
    if toolchain.can_toolchain_emit_obj_c_header_textually:
        _compile_swiftmodule(ctx, toolchain, shared_flags, srcs, output_swiftmodule, output_header)
    else:
        unprocessed_header = ctx.actions.declare_output(module_name + "-SwiftUnprocessed.h")
        _compile_swiftmodule(ctx, toolchain, shared_flags, srcs, output_swiftmodule, unprocessed_header)
        _perform_swift_postprocessing(ctx, module_name, unprocessed_header, output_header)
    swift_argsfile = _compile_object(ctx, toolchain, shared_flags, srcs, output_object)
    # Swift libraries extend the ObjC modulemaps to include the -Swift.h header
    modulemap_pp_info = preprocessor_info_for_modulemap(ctx, "swift-extended", exported_headers, output_header)
    exported_swift_header = CHeader(
        artifact = output_header,
        name = output_header.basename,
        namespace = module_name,
        named = False,
    )
    exported_pp_info = CPreprocessor(
        headers = [exported_swift_header],
        modular_args = modulemap_pp_info.modular_args,
        args = modulemap_pp_info.args,
        modulemap_path = modulemap_pp_info.modulemap_path,
    )
    # We also need to include the unprefixed -Swift.h header in this libraries preprocessor info
    swift_header = CHeader(
        artifact = output_header,
        name = output_header.basename,
        namespace = "",
        named = False,
    )
    pre = CPreprocessor(headers = [swift_header])
    # Pass up the swiftmodule paths for this module and its exported_deps
    return SwiftCompilationOutput(
        object_file = output_object,
        swiftmodule = output_swiftmodule,
        providers = [get_swift_dependency_info(ctx, exported_pp_info, output_swiftmodule)],
        pre = pre,
        exported_pre = exported_pp_info,
        swift_argsfile = swift_argsfile,
    )
# Swift headers are postprocessed to make them compatible with Objective-C
# compilation that does not use -fmodules. This is a workaround for the bad
# performance of -fmodules without Explicit Modules, once Explicit Modules is
# supported, this postprocessing should be removed.
def _perform_swift_postprocessing(
        ctx: "context",
        module_name: "string",
        unprocessed_header: "artifact",
        output_header: "artifact"):
    """Rewrites the generated -Swift.h so it can be consumed without -fmodules.

    Runs the swift_objc_header_postprocess tool, feeding it a JSON map of all
    transitively exported headers keyed by module name.
    """
    # Flatten the exported-headers tset into a single {module: [headers]} dict,
    # skipping None entries (targets that exported nothing).
    transitive_exported_headers = {
        module: module_exported_headers
        for exported_headers_map in _get_exported_headers_tset(ctx).traverse()
        if exported_headers_map
        for module, module_exported_headers in exported_headers_map.items()
    }
    deps_json = ctx.actions.write_json(module_name + "-Deps.json", transitive_exported_headers)
    postprocess_cmd = cmd_args(ctx.attrs._apple_tools[AppleToolsInfo].swift_objc_header_postprocess)
    postprocess_cmd.add([
        unprocessed_header,
        deps_json,
        output_header.as_output(),
    ])
    ctx.actions.run(postprocess_cmd, category = "swift_objc_header_postprocess")
# We use separate actions for swiftmodule and object file output. This
# improves build parallelism at the cost of duplicated work, but by disabling
# type checking in function bodies the swiftmodule compilation can be done much
# faster than object file output.
def _compile_swiftmodule(
        ctx: "context",
        toolchain: "SwiftToolchainInfo",
        shared_flags: "cmd_args",
        srcs: [CxxSrcWithFlags.type],
        output_swiftmodule: "artifact",
        output_header: "artifact") -> "CxxAdditionalArgsfileParams":
    # Emit only the swiftmodule and the generated ObjC header; skipping type
    # checking of non-inlinable function bodies keeps this action fast.
    argsfile_flags = cmd_args(shared_flags)
    argsfile_flags.add("-Xfrontend")
    argsfile_flags.add("-experimental-skip-non-inlinable-function-bodies-without-types")
    argsfile_flags.add("-emit-module")
    argsfile_flags.add("-emit-objc-header")
    output_flags = cmd_args()
    output_flags.add(["-emit-module-path", output_swiftmodule.as_output()])
    output_flags.add(["-emit-objc-header-path", output_header.as_output()])
    return _compile_with_argsfile(ctx, "swiftmodule_compile", argsfile_flags, srcs, output_flags, toolchain)
def _compile_object(
        ctx: "context",
        toolchain: "SwiftToolchainInfo",
        shared_flags: "cmd_args",
        srcs: [CxxSrcWithFlags.type],
        output_object: "artifact") -> "CxxAdditionalArgsfileParams":
    # Full code generation: produce the single (whole-module) object file.
    object_flags = cmd_args("-emit-object")
    object_flags.add(["-o", output_object.as_output()])
    return _compile_with_argsfile(ctx, "swift_compile", shared_flags, srcs, object_flags, toolchain)
def _compile_with_argsfile(
        ctx: "context",
        name: str.type,
        shared_flags: "cmd_args",
        srcs: [CxxSrcWithFlags.type],
        additional_flags: "cmd_args",
        toolchain: "SwiftToolchainInfo") -> "CxxAdditionalArgsfileParams":
    """Runs the Swift compiler with `shared_flags` placed into an argsfile.

    `additional_flags` are passed directly on the command line. Returns the
    argsfile params so subtargets can expose the file.
    """
    shell_quoted_cmd = cmd_args(shared_flags, quote = "shell")
    argfile, _ = ctx.actions.write(name + ".argsfile", shell_quoted_cmd, allow_args = True)
    cmd = cmd_args(toolchain.compiler)
    cmd.add(additional_flags)
    cmd.add(cmd_args(["@", argfile], delimiter = ""))
    cmd.add([s.file for s in srcs])
    # Compute once; the original evaluated uses_explicit_modules(ctx) twice.
    explicit_modules_enabled = uses_explicit_modules(ctx)
    # Swift compilation on RE without explicit modules is impractically expensive
    # because there's no shared module cache across different libraries.
    prefer_local = not explicit_modules_enabled
    # Argsfile should also depend on all artifacts in it, otherwise they won't be materialised.
    cmd.hidden([shared_flags])
    # Make it easier to debug whether Swift actions get compiled with explicit modules or not
    category = name + ("_with_explicit_mods" if explicit_modules_enabled else "")
    ctx.actions.run(
        cmd,
        env = get_explicit_modules_env_var(explicit_modules_enabled),
        category = category,
        # If we prefer to execute locally (e.g., for perf reasons), ensure we upload to the cache,
        # so that CI builds populate caches used by developer machines.
        prefer_local = prefer_local,
        allow_cache_upload = prefer_local,
    )
    return CxxAdditionalArgsfileParams(file = argfile, hidden_args = [shared_flags], extension = ".swift")
def _get_shared_flags(
        ctx: "context",
        deps_providers: list.type,
        underlying_module: ["SwiftPCMCompiledInfo", None],
        module_name: str.type,
        objc_headers: [CHeader.type],
        objc_modulemap_pp_info: ["CPreprocessor", None],
        extra_search_paths_flags: ["_arglike"] = []) -> "cmd_args":
    """Builds the compiler flags shared by the swiftmodule and object actions."""
    toolchain = ctx.attrs._apple_toolchain[AppleToolchainInfo].swift_toolchain_info
    cmd = cmd_args()
    cmd.add([
        # This allows us to use a relative path for the compiler resource directory.
        "-working-directory",
        ".",
        "-sdk",
        toolchain.sdk_path,
        "-target",
        get_versioned_target_triple(ctx),
        "-wmo",
        "-module-name",
        module_name,
        "-parse-as-library",
        # Disable Clang module breadcrumbs in the DWARF info. These will not be
        # debug prefix mapped and are not shareable across machines.
        "-Xfrontend",
        "-no-clang-module-breadcrumbs",
        "-Xfrontend",
        "-enable-cross-import-overlays",
    ])
    if uses_explicit_modules(ctx):
        cmd.add(get_disable_pch_validation_flags())
    if toolchain.resource_dir:
        cmd.add([
            "-resource-dir",
            toolchain.resource_dir,
        ])
    if ctx.attrs.swift_version:
        cmd.add(["-swift-version", ctx.attrs.swift_version])
    if ctx.attrs.enable_cxx_interop:
        cmd.add(["-enable-experimental-cxx-interop"])
    # Serialized debugging options are only safe for Swift-only modules on
    # toolchains that can prefix-map the serialized paths.
    serialize_debugging_options = False
    if ctx.attrs.serialize_debugging_options:
        if objc_headers:
            # TODO(T99100029): We cannot use VFS overlays with Buck2, so we have to disable
            # serializing debugging options for mixed libraries to debug successfully
            warning("Mixed libraries cannot serialize debugging options, disabling for module `{}` in rule `{}`".format(module_name, ctx.label))
        elif not toolchain.prefix_serialized_debugging_options:
            warning("The current toolchain does not support prefixing serialized debugging options, disabling for module `{}` in rule `{}`".format(module_name, ctx.label))
        else:
            # Apply the debug prefix map to Swift serialized debugging info.
            # This will allow for debugging remotely built swiftmodule files.
            serialize_debugging_options = True
    if serialize_debugging_options:
        cmd.add([
            "-Xfrontend",
            "-serialize-debugging-options",
            "-Xfrontend",
            "-prefix-serialized-debugging-options",
        ])
    else:
        cmd.add([
            "-Xfrontend",
            "-no-serialize-debugging-options",
        ])
    if toolchain.can_toolchain_emit_obj_c_header_textually:
        cmd.add([
            "-Xfrontend",
            "-emit-clang-header-nonmodular-includes",
        ])
    pcm_deps_tset = get_compiled_pcm_deps_tset(ctx, deps_providers)
    sdk_deps_tset = get_compiled_sdk_deps_tset(ctx, deps_providers)
    # Add flags required to import ObjC module dependencies
    _add_clang_deps_flags(ctx, pcm_deps_tset, sdk_deps_tset, cmd)
    _add_swift_deps_flags(ctx, sdk_deps_tset, cmd)
    # Add flags for importing the ObjC part of this library
    _add_mixed_library_flags_to_cmd(ctx, cmd, underlying_module, objc_headers, objc_modulemap_pp_info)
    # Add toolchain and target flags last to allow for overriding defaults
    cmd.add(toolchain.compiler_flags)
    cmd.add(ctx.attrs.swift_compiler_flags)
    cmd.add(extra_search_paths_flags)
    return cmd
def _add_swift_deps_flags(
        ctx: "context",
        sdk_deps_tset: "SDKDepTSet",
        cmd: "cmd_args"):
    """Adds the flags required to import Swift module dependencies."""
    # If Explicit Modules are enabled, a few things must be provided to a compilation job:
    # 1. Direct and transitive SDK deps from `sdk_modules` attribute.
    # 2. Direct and transitive user-defined deps.
    # 3. Transitive SDK deps of user-defined deps.
    # (This is the case, when a user-defined dep exports a type from SDK module,
    # thus such SDK module should be implicitly visible to consumers of that custom dep)
    if uses_explicit_modules(ctx):
        module_name = get_module_name(ctx)
        swift_deps_tset = ctx.actions.tset(
            SwiftmodulePathsTSet,
            children = _get_swift_paths_tsets(ctx.attrs.deps + ctx.attrs.exported_deps),
        )
        # The module map JSON tells the compiler exactly where every explicit
        # swiftmodule dependency lives.
        swift_module_map_artifact = write_swift_module_map_with_swift_deps(
            ctx,
            module_name,
            list(sdk_deps_tset.traverse()),
            list(swift_deps_tset.traverse()),
        )
        cmd.add([
            "-Xcc",
            "-fno-implicit-modules",
            "-Xcc",
            "-fno-implicit-module-maps",
            "-Xfrontend",
            "-disable-implicit-swift-modules",
            "-Xfrontend",
            "-explicit-swift-module-map-file",
            "-Xfrontend",
            swift_module_map_artifact,
        ])
        # Swift compilation should depend on transitive Swift modules from swift-module-map.
        cmd.hidden(sdk_deps_tset.project_as_args("hidden"))
        cmd.hidden(swift_deps_tset.project_as_args("hidden"))
    else:
        # Implicit modules: let the compiler discover swiftmodules via -I search paths.
        depset = ctx.actions.tset(SwiftmodulePathsTSet, children = _get_swift_paths_tsets(ctx.attrs.deps + ctx.attrs.exported_deps))
        cmd.add(depset.project_as_args("module_search_path"))
def _add_clang_deps_flags(
        ctx: "context",
        pcm_deps_tset: "PcmDepTSet",
        sdk_deps_tset: "SDKDepTSet",
        cmd: "cmd_args") -> None:
    """Adds the flags required to import this module's Clang dependencies."""
    if uses_explicit_modules(ctx):
        # With explicit modules, all direct and transitive Clang deps have to
        # be explicitly added; SDK Clang modules live in a separate tset.
        cmd.add(pcm_deps_tset.project_as_args("clang_deps"))
        # Add Clang sdk modules which do not go to swift modulemap
        cmd.add(sdk_deps_tset.project_as_args("clang_deps"))
    else:
        # Implicit modules: forward the merged preprocessor flags of all deps
        # to Clang via -Xcc, preserving args/modular_args/include_dirs order.
        merged = cxx_merge_cpreprocessors(
            ctx,
            [],
            cxx_inherited_preprocessor_infos(ctx.attrs.deps + ctx.attrs.exported_deps),
        )
        for projection in ["args", "modular_args", "include_dirs"]:
            cmd.add(cmd_args(merged.set.project_as_args(projection), prepend = "-Xcc"))
def _add_mixed_library_flags_to_cmd(
        ctx: "context",
        cmd: "cmd_args",
        underlying_module: ["SwiftPCMCompiledInfo", None],
        objc_headers: [CHeader.type],
        objc_modulemap_pp_info: ["CPreprocessor", None]) -> None:
    """Makes the ObjC half of a mixed library importable from its Swift half."""
    if uses_explicit_modules(ctx):
        # Explicit modules: supply the precompiled underlying PCM directly.
        if underlying_module:
            underlying_tset = ctx.actions.tset(PcmDepTSet, value = underlying_module)
            cmd.add(underlying_tset.project_as_args("clang_deps"))
            cmd.add("-import-underlying-module")
        return
    if not objc_headers:
        return
    # TODO(T99100029): We cannot use VFS overlays to mask this import from
    # the debugger as they require absolute paths. Instead we will enforce
    # that mixed libraries do not have serialized debugging info and rely on
    # rdeps to serialize the correct paths.
    for modulemap_arg in objc_modulemap_pp_info.args:
        cmd.add(["-Xcc", modulemap_arg])
    for modulemap_arg in objc_modulemap_pp_info.modular_args:
        cmd.add(["-Xcc", modulemap_arg])
    cmd.add("-import-underlying-module")
def _get_swift_paths_tsets(deps: ["dependency"]) -> ["SwiftmodulePathsTSet"]:
    # Collect the exported swiftmodule-path tsets of deps that compile Swift.
    tsets = []
    for dep in deps:
        if SwiftDependencyInfo in dep:
            tsets.append(dep[SwiftDependencyInfo].exported_swiftmodule_paths)
    return tsets
def _get_transitive_swift_paths_tsets(deps: ["dependency"]) -> ["SwiftmodulePathsTSet"]:
    # Collect the transitive swiftmodule-path tsets of deps that compile Swift.
    tsets = []
    for dep in deps:
        if SwiftDependencyInfo in dep:
            tsets.append(dep[SwiftDependencyInfo].transitive_swiftmodule_paths)
    return tsets
def _get_external_debug_info_tsets(deps: ["dependency"]) -> [ExternalDebugInfoTSet.type]:
    # Gather non-None external debug info from Swift-compiling deps.
    infos = []
    for dep in deps:
        if SwiftDependencyInfo not in dep:
            continue
        debug_info = dep[SwiftDependencyInfo].external_debug_info
        if debug_info != None:
            infos.append(debug_info)
    return infos
def _get_exported_headers_tset(ctx: "context", exported_headers: [["string"], None] = None) -> "ExportedHeadersTSet":
    # Children come from exported deps that themselves export headers; this
    # node contributes its own {module: headers} entry when given any.
    children = []
    for exported_dep in ctx.attrs.exported_deps:
        info = exported_dep.get(SwiftDependencyInfo)
        if info and info.exported_headers:
            children.append(info.exported_headers)
    return ctx.actions.tset(
        ExportedHeadersTSet,
        value = {get_module_name(ctx): exported_headers} if exported_headers else None,
        children = children,
    )
def get_swift_pcm_uncompile_info(
        ctx: "context",
        propagated_exported_preprocessor_info: ["CPreprocessorInfo", None],
        exported_pre: ["CPreprocessor", None]) -> ["SwiftPCMUncompiledInfo", None]:
    """Returns the info needed to compile this library's PCM later, or None
    when the toolchain has no SDK modules configured."""
    swift_toolchain = ctx.attrs._apple_toolchain[AppleToolchainInfo].swift_toolchain_info
    if not is_sdk_modules_provided(swift_toolchain):
        return None
    if propagated_exported_preprocessor_info:
        propagated_pp_args_cmd = cmd_args(propagated_exported_preprocessor_info.set.project_as_args("args"), prepend = "-Xcc")
    else:
        propagated_pp_args_cmd = None
    return SwiftPCMUncompiledInfo(
        name = get_module_name(ctx),
        is_transient = not ctx.attrs.modular or not exported_pre,
        exported_preprocessor = exported_pre,
        exported_deps = ctx.attrs.exported_deps,
        propagated_preprocessor_args_cmd = propagated_pp_args_cmd,
        uncompiled_sdk_modules = ctx.attrs.sdk_modules,
    )
def get_swift_dependency_info(
        ctx: "context",
        exported_pre: ["CPreprocessor", None],
        output_module: ["artifact", None]) -> "SwiftDependencyInfo":
    """Assembles the SwiftDependencyInfo provider for this target.

    `output_module` is this target's own swiftmodule (None when no Swift was
    compiled); it is added to both the exported and transitive path tsets and
    registered as external debug info.
    """
    all_deps = ctx.attrs.exported_deps + ctx.attrs.deps
    if ctx.attrs.reexport_all_header_dependencies:
        exported_deps = all_deps
    else:
        exported_deps = ctx.attrs.exported_deps
    exported_headers = [_header_basename(header) for header in ctx.attrs.exported_headers]
    exported_headers += [header.name for header in exported_pre.headers] if exported_pre else []
    if output_module:
        exported_swiftmodules = ctx.actions.tset(SwiftmodulePathsTSet, value = output_module, children = _get_swift_paths_tsets(exported_deps))
        transitive_swiftmodules = ctx.actions.tset(SwiftmodulePathsTSet, value = output_module, children = _get_transitive_swift_paths_tsets(all_deps))
    else:
        exported_swiftmodules = ctx.actions.tset(SwiftmodulePathsTSet, children = _get_swift_paths_tsets(exported_deps))
        transitive_swiftmodules = ctx.actions.tset(SwiftmodulePathsTSet, children = _get_transitive_swift_paths_tsets(all_deps))
    external_debug_info = maybe_external_debug_info(
        actions = ctx.actions,
        label = ctx.label,
        artifacts = [output_module] if output_module != None else [],
        children = _get_external_debug_info_tsets(all_deps),
    )
    return SwiftDependencyInfo(
        exported_headers = _get_exported_headers_tset(ctx, exported_headers),
        exported_swiftmodule_paths = exported_swiftmodules,
        transitive_swiftmodule_paths = transitive_swiftmodules,
        external_debug_info = external_debug_info,
    )
def _header_basename(header: ["artifact", "string"]) -> "string":
if type(header) == type(""):
return paths.basename(header)
else:
return header.basename
def uses_explicit_modules(ctx: "context") -> bool.type:
    # True only when the target opts into explicit modules AND the toolchain
    # actually provides uncompiled SDK modules to build against.
    swift_toolchain = ctx.attrs._apple_toolchain[AppleToolchainInfo].swift_toolchain_info
    return ctx.attrs.uses_explicit_modules and is_sdk_modules_provided(swift_toolchain)

View File

@ -0,0 +1,40 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
def write_swift_module_map(
        ctx: "context",
        module_name: str.type,
        sdk_deps: ["SdkCompiledModuleInfo"]) -> "artifact":
    # Convenience wrapper for the SDK-deps-only case (no user Swift deps).
    return write_swift_module_map_with_swift_deps(ctx, module_name, sdk_deps, [])
def write_swift_module_map_with_swift_deps(
        ctx: "context",
        module_name: str.type,
        sdk_swift_deps: ["SdkCompiledModuleInfo"],
        swift_deps: ["artifact"]) -> "artifact":
    """Writes the explicit-modules JSON map describing every Swift dependency.

    The JSON is consumed via -explicit-swift-module-map-file and contains one
    entry per swiftmodule with its name, path, and framework-ness.
    """
    deps = {}
    for sdk_dep in sdk_swift_deps:
        # SDK deps can also be Clang modules; only swiftmodules belong here.
        if sdk_dep.is_swiftmodule:
            deps[sdk_dep.module_name] = {
                "isFramework": sdk_dep.is_framework,
                "moduleName": sdk_dep.module_name,
                "modulePath": sdk_dep.output_artifact,
            }
    for swift_dep in swift_deps:
        # The swiftmodule filename always matches the module name. Strip the
        # extension by its length rather than the magic constant 12.
        name = swift_dep.basename[:-len(".swiftmodule")]
        deps[name] = {
            "isFramework": False,
            "moduleName": name,
            "modulePath": swift_dep,
        }
    return ctx.actions.write_json(
        module_name + ".swift_module_map.json",
        deps.values(),
    )

View File

@ -0,0 +1,302 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo")
load("@prelude//apple:apple_utility.bzl", "get_explicit_modules_env_var", "get_module_name", "get_versioned_target_triple")
load("@prelude//cxx:preprocessor.bzl", "cxx_inherited_preprocessor_infos", "cxx_merge_cpreprocessors")
load(":apple_sdk_modules_utility.bzl", "get_compiled_sdk_deps_tset", "get_uncompiled_sdk_deps")
load(":swift_pcm_compilation_types.bzl", "SwiftPCMCompiledInfo", "SwiftPCMUncompiledInfo", "WrappedSwiftPCMCompiledInfo")
load(":swift_sdk_pcm_compilation.bzl", "get_shared_pcm_compilation_args", "get_swift_sdk_pcm_anon_targets")
load(":swift_sdk_swiftinterface_compilation.bzl", "get_swift_interface_anon_targets")
load(":swift_toolchain_types.bzl", "WrappedSdkCompiledModuleInfo")
# SDK modules that every PCM compilation implicitly requires.
_REQUIRED_SDK_MODULES = ["Foundation"]
def _project_as_clang_deps(value: "SwiftPCMCompiledInfo"):
    # Renders one compiled PCM as the Clang flags needed to import it: the
    # prebuilt module file, its modulemap, and the module's own cc args.
    # The modular args are hidden inputs so their artifacts materialize.
    return cmd_args([
        "-Xcc",
        cmd_args(["-fmodule-file=", value.name, "=", value.pcm_output], delimiter = ""),
        "-Xcc",
        cmd_args(["-fmodule-map-file=", value.exported_preprocessor.modulemap_path], delimiter = ""),
        "-Xcc",
    ] + value.exported_preprocessor.args).hidden(value.exported_preprocessor.modular_args)
# Transitive set of SwiftPCMCompiledInfo, projected as Clang import flags.
PcmDepTSet = transitive_set(args_projections = {
    "clang_deps": _project_as_clang_deps,
})
def get_compiled_pcm_deps_tset(ctx: "context", pcm_deps_providers: list.type) -> "PcmDepTSet":
    # Merge the PCM tsets wrapped by each resolved anon-target provider.
    children = []
    for pcm_deps_provider in pcm_deps_providers:
        if WrappedSwiftPCMCompiledInfo in pcm_deps_provider:
            children.append(pcm_deps_provider[WrappedSwiftPCMCompiledInfo].tset)
    return ctx.actions.tset(PcmDepTSet, children = children)
def get_swift_pcm_anon_targets(
        ctx: "context",
        uncompiled_deps: ["dependency"],
        swift_cxx_args: [str.type]):
    # One anon compilation per dep that can produce a PCM, each inheriting
    # the caller's toolchain, target SDK version and Clang args.
    anon_targets = []
    for uncompiled_dep in uncompiled_deps:
        if SwiftPCMUncompiledInfo not in uncompiled_dep:
            continue
        anon_targets.append((_swift_pcm_compilation, {
            "dep": uncompiled_dep,
            "pcm_name": uncompiled_dep[SwiftPCMUncompiledInfo].name,
            "swift_cxx_args": swift_cxx_args,
            "target_sdk_version": ctx.attrs.target_sdk_version,
            "_apple_toolchain": ctx.attrs._apple_toolchain,
        }))
    return anon_targets
def _compile_with_argsfile(
        ctx: "context",
        category: str.type,
        module_name: str.type,
        args: "cmd_args",
        additional_cmd: "cmd_args"):
    # Shell-quote the shared flags into an argsfile so the command line stays
    # short and the flags remain inspectable.
    quoted_args = cmd_args(args, quote = "shell")
    argfile, _ = ctx.actions.write(module_name + ".pcm.argsfile", quoted_args, allow_args = True)
    swift_toolchain = ctx.attrs._apple_toolchain[AppleToolchainInfo].swift_toolchain_info
    cmd = cmd_args(swift_toolchain.compiler)
    cmd.add(cmd_args(["@", argfile], delimiter = ""))
    # Action should also depend on all artifacts from the argsfile, otherwise they won't be materialised.
    cmd.hidden([args])
    cmd.add(additional_cmd)
    # T142915880 There is an issue with hard links,
    # when we compile pcms remotely on linux machines.
    local_only = True
    ctx.actions.run(
        cmd,
        env = get_explicit_modules_env_var(True),
        category = category,
        identifier = module_name,
        local_only = local_only,
        allow_cache_upload = local_only,
    )
def _swift_pcm_compilation_impl(ctx: "context") -> ["promise", ["provider"]]:
    """Implementation of the anon rule that compiles one Swift PCM.

    Spawns anon targets for the dep's own SDK Clang / SDK Swift / exported-dep
    PCM dependencies, then compiles the PCM itself in the continuation `k`.
    """

    def k(compiled_pcm_deps_providers) -> ["provider"]:
        uncompiled_pcm_info = ctx.attrs.dep[SwiftPCMUncompiledInfo]
        # `compiled_pcm_deps_providers` will contain `WrappedSdkCompiledModuleInfo` providers
        # from direct SDK deps and transitive deps that export sdk deps.
        sdk_deps_tset = get_compiled_sdk_deps_tset(ctx, compiled_pcm_deps_providers)
        # To compile a pcm we only use the exported_deps as those are the only
        # ones that should be transitively exported through public headers
        pcm_deps_tset = get_compiled_pcm_deps_tset(ctx, compiled_pcm_deps_providers)
        # We don't need to compile non-modular or targets that do not export any headers,
        # but for the sake of BUCK1 compatibility, we need to pass them up,
        # in case they re-export some dependencies.
        if uncompiled_pcm_info.is_transient:
            return [
                DefaultInfo(),
                WrappedSwiftPCMCompiledInfo(
                    tset = ctx.actions.tset(PcmDepTSet, children = [pcm_deps_tset]),
                ),
                WrappedSdkCompiledModuleInfo(
                    tset = sdk_deps_tset,
                ),
            ]
        module_name = ctx.attrs.pcm_name
        cmd, additional_cmd, pcm_output = _get_base_pcm_flags(
            ctx,
            module_name,
            uncompiled_pcm_info,
            sdk_deps_tset,
            pcm_deps_tset,
            ctx.attrs.swift_cxx_args,
        )
        # It's possible that modular targets can re-export headers of non-modular targets,
        # (e.g `raw_headers`) because of that we need to provide search paths of such targets to
        # pcm compilation actions in order for them to be successful.
        inherited_preprocessor_infos = cxx_inherited_preprocessor_infos(uncompiled_pcm_info.exported_deps)
        preprocessors = cxx_merge_cpreprocessors(ctx, [], inherited_preprocessor_infos)
        cmd.add(cmd_args(preprocessors.set.project_as_args("include_dirs"), prepend = "-Xcc"))
        # When compiling pcm files, module's exported pps and inherited pps
        # must be provided to an action like hmaps which are used for headers resolution.
        if uncompiled_pcm_info.propagated_preprocessor_args_cmd:
            cmd.add(uncompiled_pcm_info.propagated_preprocessor_args_cmd)
        _compile_with_argsfile(
            ctx,
            "swift_pcm_compile",
            module_name,
            cmd,
            additional_cmd,
        )
        compiled_pcm = SwiftPCMCompiledInfo(
            name = module_name,
            pcm_output = pcm_output,
            exported_preprocessor = uncompiled_pcm_info.exported_preprocessor,
        )
        return [
            DefaultInfo(default_outputs = [pcm_output]),
            WrappedSwiftPCMCompiledInfo(
                tset = ctx.actions.tset(PcmDepTSet, value = compiled_pcm, children = [pcm_deps_tset]),
            ),
            WrappedSdkCompiledModuleInfo(
                tset = sdk_deps_tset,
            ),
        ]

    # Skip deps compilations if run not on SdkUncompiledModuleInfo
    if SwiftPCMUncompiledInfo not in ctx.attrs.dep:
        return []
    direct_uncompiled_sdk_deps = get_uncompiled_sdk_deps(
        ctx.attrs.dep[SwiftPCMUncompiledInfo].uncompiled_sdk_modules,
        _REQUIRED_SDK_MODULES,
        ctx.attrs._apple_toolchain[AppleToolchainInfo].swift_toolchain_info,
    )
    # Recursively compiling SDK's Clang dependencies
    sdk_pcm_deps_anon_targets = get_swift_sdk_pcm_anon_targets(
        ctx,
        direct_uncompiled_sdk_deps,
        ctx.attrs.swift_cxx_args,
    )
    # Recursively compiling SDK's Swift dependencies
    # We need to match BUCK1 behavior, which can't distinguish between Swift and Clang SDK modules,
    # so we pass more SDK deps than is strictly necessary. When BUCK1 is deprecated, we can try to avoid doing that,
    # by passing Clang and Swift deps up separately.
    swift_interface_anon_targets = get_swift_interface_anon_targets(
        ctx,
        direct_uncompiled_sdk_deps,
        ctx.attrs.swift_cxx_args,
    )
    # Recursively compile PCMs of transitively visible exported_deps
    swift_pcm_anon_targets = get_swift_pcm_anon_targets(
        ctx,
        ctx.attrs.dep[SwiftPCMUncompiledInfo].exported_deps,
        ctx.attrs.swift_cxx_args,
    )
    return ctx.actions.anon_targets(sdk_pcm_deps_anon_targets + swift_pcm_anon_targets + swift_interface_anon_targets).map(k)
# Anonymous rule that compiles a single Swift PCM (and, transitively, the
# PCMs of its exported deps and its required SDK modules).
_swift_pcm_compilation = rule(
    impl = _swift_pcm_compilation_impl,
    attrs = {
        "dep": attrs.dep(),
        "pcm_name": attrs.string(),
        "swift_cxx_args": attrs.list(attrs.string(), default = []),
        "target_sdk_version": attrs.option(attrs.string(), default = None),
        "_apple_toolchain": attrs.dep(),
    },
)
def compile_underlying_pcm(
        ctx: "context",
        uncompiled_pcm_info: "SwiftPCMUncompiledInfo",
        compiled_pcm_deps_providers,
        swift_cxx_args: [str.type]) -> "SwiftPCMCompiledInfo":
    """Compiles the underlying ObjC module of a mixed library into a PCM.

    Used with explicit modules so the Swift half of the library can
    `-import-underlying-module`.
    """
    module_name = get_module_name(ctx)
    # `compiled_pcm_deps_providers` will contain `WrappedSdkCompiledModuleInfo` providers
    # from direct SDK deps and transitive deps that export sdk deps.
    sdk_deps_tset = get_compiled_sdk_deps_tset(ctx, compiled_pcm_deps_providers)
    # To compile a pcm we only use the exported_deps as those are the only
    # ones that should be transitively exported through public headers
    pcm_deps_tset = get_compiled_pcm_deps_tset(ctx, compiled_pcm_deps_providers)
    cmd, additional_cmd, pcm_output = _get_base_pcm_flags(
        ctx,
        module_name,
        uncompiled_pcm_info,
        sdk_deps_tset,
        pcm_deps_tset,
        swift_cxx_args,
    )
    modulemap_path = uncompiled_pcm_info.exported_preprocessor.modulemap_path
    # Add a search path to the exported symlink tree that sits next to the
    # modulemap so its headers can be resolved.
    cmd.add([
        "-Xcc",
        "-I",
        "-Xcc",
        cmd_args([cmd_args(modulemap_path).parent(), "exported_symlink_tree"], delimiter = "/"),
    ])
    _compile_with_argsfile(
        ctx,
        "swift_underlying_pcm_compile",
        module_name,
        cmd,
        additional_cmd,
    )
    return SwiftPCMCompiledInfo(
        name = module_name,
        pcm_output = pcm_output,
        exported_preprocessor = uncompiled_pcm_info.exported_preprocessor,
    )
def _get_base_pcm_flags(
        ctx: "context",
        module_name: str.type,
        uncompiled_pcm_info: "SwiftPCMUncompiledInfo",
        sdk_deps_tset: "SDKDepTSet",
        pcm_deps_tset: "PcmDepTSet",
        swift_cxx_args: [str.type]) -> ("cmd_args", "cmd_args", "artifact"):
    """Builds the common flags for one PCM compilation.

    Returns (argsfile flags, flags passed directly on the command line, the
    declared .pcm output artifact).
    """
    swift_toolchain = ctx.attrs._apple_toolchain[AppleToolchainInfo].swift_toolchain_info
    cmd = cmd_args()
    cmd.add(get_shared_pcm_compilation_args(get_versioned_target_triple(ctx), module_name))
    cmd.add(["-sdk", swift_toolchain.sdk_path])
    cmd.add(swift_toolchain.compiler_flags)
    # This allows us to avoid usage of absolute paths in generated PCM modules.
    cmd.add([
        "-working-directory",
        ".",
    ])
    if swift_toolchain.resource_dir:
        cmd.add([
            "-resource-dir",
            swift_toolchain.resource_dir,
        ])
    cmd.add(sdk_deps_tset.project_as_args("clang_deps"))
    cmd.add(pcm_deps_tset.project_as_args("clang_deps"))
    modulemap_path = uncompiled_pcm_info.exported_preprocessor.modulemap_path
    pcm_output = ctx.actions.declare_output(module_name + ".pcm")
    additional_cmd = cmd_args(swift_cxx_args)
    additional_cmd.add([
        "-o",
        pcm_output.as_output(),
        modulemap_path,
    ])
    # To correctly resolve modulemap's headers,
    # a search path to the root of modulemap should be passed.
    cmd.add([
        "-Xcc",
        "-I",
        "-Xcc",
        cmd_args(modulemap_path).parent(),
    ])
    # Modular deps like `-Swift.h` have to be materialized.
    cmd.hidden(uncompiled_pcm_info.exported_preprocessor.modular_args)
    return (cmd, additional_cmd, pcm_output)

View File

@ -0,0 +1,26 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
# Info exported by targets BEFORE their PCM is compiled; consumed by the
# anonymous PCM compilation rules.
SwiftPCMUncompiledInfo = provider(fields = [
    "name",
    "is_transient",  # If True represents a transient apple_library target, that can't be compiled into pcm, but which we need to pass up for BUCK1 compatibility, because it can re-export some deps.
    "exported_preprocessor",  # CPreprocessor
    "exported_deps",  # ["dependency"]
    "propagated_preprocessor_args_cmd",  # cmd_args
    "uncompiled_sdk_modules",  # [str.type] a list of required sdk modules
])
# A tset can't be returned from the rule, so we need to wrap it into a provider.
WrappedSwiftPCMCompiledInfo = provider(fields = [
    "tset",  # Tset of `SwiftPCMCompiledInfo`
])
# A single compiled PCM, projected into Clang flags via PcmDepTSet.
SwiftPCMCompiledInfo = provider(fields = [
    "name",
    "pcm_output",  # artifact: the compiled .pcm file
    "exported_preprocessor",  # CPreprocessor which we need to keep around to be able to access modulemap path.
])

View File

@ -0,0 +1,203 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo")
load("@prelude//apple:apple_utility.bzl", "expand_relative_prefixed_sdk_path", "get_disable_pch_validation_flags")
load(":apple_sdk_modules_utility.bzl", "SDKDepTSet", "get_compiled_sdk_deps_tset")
load(":swift_toolchain_types.bzl", "SdkCompiledModuleInfo", "SdkUncompiledModuleInfo", "WrappedSdkCompiledModuleInfo")
def get_shared_pcm_compilation_args(target: str.type, module_name: str.type) -> "cmd_args":
    """Returns the PCM compilation flags common to SDK and user modules.

    These make PCM output machine-independent (no debug info, embedded
    inputs, cwd-relative paths) so builds are cacheable across hosts.
    """
    cmd = cmd_args()
    cmd.add([
        "-emit-pcm",
        "-target",
        target,
        "-module-name",
        module_name,
        "-Xfrontend",
        "-disable-implicit-swift-modules",
        "-Xcc",
        "-fno-implicit-modules",
        "-Xcc",
        "-fno-implicit-module-maps",
        # Disable debug info in pcm files. This is required to avoid embedding absolute paths
        # and ending up with mismatched pcm file sizes.
        "-Xcc",
        "-Xclang",
        "-Xcc",
        "-fmodule-format=raw",
        # Embed all input files into the PCM so we don't need to include module map files when
        # building remotely.
        # https://github.com/apple/llvm-project/commit/fb1e7f7d1aca7bcfc341e9214bda8b554f5ae9b6
        "-Xcc",
        "-Xclang",
        "-Xcc",
        "-fmodules-embed-all-files",
        # Embed all files that were read during compilation into the generated PCM.
        "-Xcc",
        "-Xclang",
        "-Xcc",
        "-fmodule-file-home-is-cwd",
        # Once we have an empty working directory the compiler provided headers such as float.h
        # cannot be found, so add . to the header search paths.
        "-Xcc",
        "-I.",
    ])
    cmd.add(get_disable_pch_validation_flags())
    return cmd
def _remove_path_components_from_right(path: str.type, count: int.type):
    """Drops `count` trailing "/"-separated components from `path`.

    Guard against `count == 0`: the naive `components[0:-count]` slice is
    `[0:0]` in that case and would wrongly yield an empty string instead of
    returning the path unchanged.
    """
    if count <= 0:
        return path
    path_components = path.split("/")
    return "/".join(path_components[:-count])
def _add_sdk_module_search_path(cmd, uncompiled_sdk_module_info, apple_toolchain):
    """Appends the Clang search-path flags needed to locate this SDK module.

    Frameworks are found via -F on the directory that contains the .framework
    bundle (three components above the modulemap); plain modules are found via
    -I on the modulemap's own directory.
    """
    modulemap_path = uncompiled_sdk_module_info.input_relative_path
    is_framework = uncompiled_sdk_module_info.is_framework
    search_root = _remove_path_components_from_right(modulemap_path, 3 if is_framework else 1)
    expanded_path = expand_relative_prefixed_sdk_path(
        cmd_args(apple_toolchain.swift_toolchain_info.sdk_path),
        cmd_args(apple_toolchain.swift_toolchain_info.resource_dir),
        cmd_args(apple_toolchain.platform_path),
        search_root,
    )
    cmd.add([
        "-Xcc",
        "-F" if is_framework else "-I",
        "-Xcc",
        cmd_args(expanded_path),
    ])
def get_swift_sdk_pcm_anon_targets(
        ctx: "context",
        uncompiled_sdk_deps: ["dependency"],
        swift_cxx_args: [str.type]):
    """Builds one anon-target request per Clang (non-Swift) SDK module dep."""
    anon_targets = []
    for sdk_dep in uncompiled_sdk_deps:
        if SdkUncompiledModuleInfo not in sdk_dep:
            continue
        module_info = sdk_dep[SdkUncompiledModuleInfo]

        # Swift modules are handled by the swiftinterface compilation rule.
        if module_info.is_swiftmodule:
            continue
        anon_targets.append((_swift_sdk_pcm_compilation, {
            "dep": sdk_dep,
            "sdk_pcm_name": module_info.name,
            "swift_cxx_args": swift_cxx_args,
            "_apple_toolchain": ctx.attrs._apple_toolchain,
        }))
    return anon_targets
def _swift_sdk_pcm_compilation_impl(ctx: "context") -> ["promise", ["provider"]]:
    # Continuation invoked once the anon targets compiling this module's
    # transitive SDK deps have produced their providers.
    def k(sdk_pcm_deps_providers) -> ["provider"]:
        uncompiled_sdk_module_info = ctx.attrs.dep[SdkUncompiledModuleInfo]
        module_name = uncompiled_sdk_module_info.module_name

        apple_toolchain = ctx.attrs._apple_toolchain[AppleToolchainInfo]
        swift_toolchain = apple_toolchain.swift_toolchain_info
        cmd = cmd_args(swift_toolchain.compiler)
        cmd.add(uncompiled_sdk_module_info.partial_cmd)
        cmd.add(["-sdk", swift_toolchain.sdk_path])
        cmd.add(swift_toolchain.compiler_flags)

        if swift_toolchain.resource_dir:
            cmd.add([
                "-resource-dir",
                swift_toolchain.resource_dir,
            ])

        # Flatten the compiled transitive deps into -fmodule-file style args.
        sdk_deps_tset = get_compiled_sdk_deps_tset(ctx, sdk_pcm_deps_providers)
        cmd.add(sdk_deps_tset.project_as_args("clang_deps"))

        # Resolve the SDK-relative modulemap path against the concrete
        # SDK/resource/platform directories of this toolchain.
        expanded_modulemap_path_cmd = expand_relative_prefixed_sdk_path(
            cmd_args(swift_toolchain.sdk_path),
            cmd_args(swift_toolchain.resource_dir),
            cmd_args(apple_toolchain.platform_path),
            uncompiled_sdk_module_info.input_relative_path,
        )
        pcm_output = ctx.actions.declare_output(module_name + ".pcm")
        cmd.add([
            "-o",
            pcm_output.as_output(),
            expanded_modulemap_path_cmd,
        ])

        # For SDK modules we need to set a few more args
        cmd.add([
            "-Xcc",
            "-Xclang",
            "-Xcc",
            "-emit-module",
            "-Xcc",
            "-Xclang",
            "-Xcc",
            "-fsystem-module",
        ])

        cmd.add(ctx.attrs.swift_cxx_args)

        _add_sdk_module_search_path(cmd, uncompiled_sdk_module_info, apple_toolchain)

        # T142915880 There is an issue with hard links,
        # when we compile pcms remotely on linux machines.
        local_only = True
        ctx.actions.run(
            cmd,
            category = "sdk_swift_pcm_compile",
            identifier = module_name,
            local_only = local_only,
            allow_cache_upload = local_only,
        )
        compiled_sdk = SdkCompiledModuleInfo(
            name = uncompiled_sdk_module_info.name,
            module_name = module_name,
            is_framework = uncompiled_sdk_module_info.is_framework,
            output_artifact = pcm_output,
            is_swiftmodule = False,
            input_relative_path = expanded_modulemap_path_cmd,
        )
        # Wrap in a tset so consumers get this module plus its transitive deps.
        return [
            DefaultInfo(),
            WrappedSdkCompiledModuleInfo(
                tset = ctx.actions.tset(SDKDepTSet, value = compiled_sdk, children = [sdk_deps_tset]),
            ),
        ]

    # Skip deps compilations if run not on SdkUncompiledModuleInfo
    if SdkUncompiledModuleInfo not in ctx.attrs.dep:
        return []

    # Recursively compile PCMs of any other exported_deps
    sdk_pcm_anon_targets = get_swift_sdk_pcm_anon_targets(
        ctx,
        ctx.attrs.dep[SdkUncompiledModuleInfo].deps,
        ctx.attrs.swift_cxx_args,
    )
    return ctx.actions.anon_targets(sdk_pcm_anon_targets).map(k)
# Anonymous rule that compiles a single Clang SDK module to a .pcm; instantiated
# via anon_targets so identical compilations are deduplicated across consumers.
_swift_sdk_pcm_compilation = rule(
    impl = _swift_sdk_pcm_compilation_impl,
    attrs = {
        "dep": attrs.dep(),  # The uncompiled SDK module (carries SdkUncompiledModuleInfo).
        "sdk_pcm_name": attrs.string(),  # Module name, part of the anon target key.
        "swift_cxx_args": attrs.list(attrs.string(), default = []),  # Extra cxx flags from the importing target.
        "_apple_toolchain": attrs.dep(),
    },
)

View File

@ -0,0 +1,126 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo")
load("@prelude//apple:apple_utility.bzl", "expand_relative_prefixed_sdk_path", "get_explicit_modules_env_var")
load(":apple_sdk_modules_utility.bzl", "SDKDepTSet", "get_compiled_sdk_deps_tset")
load(":swift_module_map.bzl", "write_swift_module_map")
load(":swift_sdk_pcm_compilation.bzl", "get_swift_sdk_pcm_anon_targets")
load(":swift_toolchain_types.bzl", "SdkCompiledModuleInfo", "SdkUncompiledModuleInfo", "WrappedSdkCompiledModuleInfo")
def get_swift_interface_anon_targets(
        ctx: "context",
        uncompiled_sdk_deps: ["dependency"],
        swift_cxx_args: [str.type]):
    """Builds one anon-target request per Swift SDK module dependency."""
    anon_targets = []
    for sdk_dep in uncompiled_sdk_deps:
        if SdkUncompiledModuleInfo not in sdk_dep:
            continue
        module_info = sdk_dep[SdkUncompiledModuleInfo]

        # Clang modules are handled by the PCM compilation rule instead.
        if not module_info.is_swiftmodule:
            continue
        anon_targets.append((_swift_interface_compilation, {
            "dep": sdk_dep,
            "sdk_swiftinterface_name": module_info.module_name,
            "swift_cxx_args": swift_cxx_args,
            "_apple_toolchain": ctx.attrs._apple_toolchain,
        }))
    return anon_targets
def _swift_interface_compilation_impl(ctx: "context") -> ["promise", ["provider"]]:
    # Continuation invoked once both the PCM and swiftinterface anon targets
    # for this module's deps have produced their providers.
    def k(sdk_deps_providers) -> ["provider"]:
        uncompiled_sdk_module_info = ctx.attrs.dep[SdkUncompiledModuleInfo]
        uncompiled_module_info_name = uncompiled_sdk_module_info.module_name

        apple_toolchain = ctx.attrs._apple_toolchain[AppleToolchainInfo]
        swift_toolchain = apple_toolchain.swift_toolchain_info
        cmd = cmd_args(swift_toolchain.compiler)
        cmd.add(uncompiled_sdk_module_info.partial_cmd)
        cmd.add(["-sdk", swift_toolchain.sdk_path])

        if swift_toolchain.resource_dir:
            cmd.add([
                "-resource-dir",
                swift_toolchain.resource_dir,
            ])

        # `sdk_deps_providers` contains providers of direct SDK deps,
        # as well as a provider that aggregates SDK deps coming from all transitive pcm deps.
        sdk_deps_tset = get_compiled_sdk_deps_tset(ctx, sdk_deps_providers)

        # FIXME: - Get rid of slow traversal here, and unify with two projections below.
        swift_module_map_artifact = write_swift_module_map(ctx, uncompiled_module_info_name, list(sdk_deps_tset.traverse()))
        cmd.add([
            "-explicit-swift-module-map-file",
            swift_module_map_artifact,
        ])

        # sdk_swiftinterface_compile should explicitly depend on its deps that go to swift_modulemap
        cmd.hidden(sdk_deps_tset.project_as_args("hidden"))
        cmd.add(sdk_deps_tset.project_as_args("clang_deps"))

        swiftmodule_output = ctx.actions.declare_output(uncompiled_module_info_name + ".swiftmodule")
        # Resolve the SDK-relative swiftinterface path against the concrete
        # SDK/resource/platform directories of this toolchain.
        expanded_swiftinterface_cmd = expand_relative_prefixed_sdk_path(
            cmd_args(swift_toolchain.sdk_path),
            cmd_args(swift_toolchain.resource_dir),
            cmd_args(apple_toolchain.platform_path),
            uncompiled_sdk_module_info.input_relative_path,
        )
        cmd.add([
            "-o",
            swiftmodule_output.as_output(),
            expanded_swiftinterface_cmd,
        ])

        ctx.actions.run(
            cmd,
            env = get_explicit_modules_env_var(True),
            category = "sdk_swiftinterface_compile",
            identifier = uncompiled_module_info_name,
        )

        compiled_sdk = SdkCompiledModuleInfo(
            name = uncompiled_sdk_module_info.name,
            module_name = uncompiled_module_info_name,
            is_framework = uncompiled_sdk_module_info.is_framework,
            is_swiftmodule = True,
            output_artifact = swiftmodule_output,
            input_relative_path = expanded_swiftinterface_cmd,
        )

        # Wrap in a tset so consumers get this module plus its transitive deps.
        return [
            DefaultInfo(),
            WrappedSdkCompiledModuleInfo(
                tset = ctx.actions.tset(SDKDepTSet, value = compiled_sdk, children = [sdk_deps_tset]),
            ),
        ]

    # Skip deps compilations if run not on SdkUncompiledModuleInfo
    if SdkUncompiledModuleInfo not in ctx.attrs.dep:
        return []

    sdk_pcm_deps_anon_targets = get_swift_sdk_pcm_anon_targets(
        ctx,
        ctx.attrs.dep[SdkUncompiledModuleInfo].deps,
        ctx.attrs.swift_cxx_args,
    )

    # Recursively compile swiftinterface of any other exported_deps
    sdk_swift_interface_anon_targets = get_swift_interface_anon_targets(
        ctx,
        ctx.attrs.dep[SdkUncompiledModuleInfo].deps,
        ctx.attrs.swift_cxx_args,
    )
    return ctx.actions.anon_targets(sdk_pcm_deps_anon_targets + sdk_swift_interface_anon_targets).map(k)
# Anonymous rule that compiles a single SDK swiftinterface to a .swiftmodule;
# instantiated via anon_targets so identical compilations are deduplicated.
_swift_interface_compilation = rule(
    impl = _swift_interface_compilation_impl,
    attrs = {
        "dep": attrs.dep(),  # The uncompiled SDK module (carries SdkUncompiledModuleInfo).
        "sdk_swiftinterface_name": attrs.string(),  # Module name, part of the anon target key.
        "swift_cxx_args": attrs.list(attrs.string(), default = []),  # Extra cxx flags from the importing target.
        "_apple_toolchain": attrs.dep(),
    },
)

View File

@ -0,0 +1,72 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load(":swift_toolchain_types.bzl", "SdkUncompiledModuleInfo", "SwiftToolchainInfo")
def traverse_sdk_modules_graph(
        swift_sdk_module_name_to_deps: {str.type: "dependency"},
        clang_sdk_module_name_to_deps: {str.type: "dependency"},
        sdk_module_dep: "dependency"):
    """Recursively collects SDK module deps into the two output dicts.

    Populates `swift_sdk_module_name_to_deps` / `clang_sdk_module_name_to_deps`
    in place, keyed by module name; deps are inserted after their own deps
    (post-order), and modules already present are not revisited.
    """
    if SdkUncompiledModuleInfo not in sdk_module_dep:
        return
    uncompiled_sdk_module_info = sdk_module_dep[SdkUncompiledModuleInfo]

    # If input_relative_path is None then this module represents a root node of SDK modules graph.
    # In such case, we need to handle only its deps.
    if uncompiled_sdk_module_info.input_relative_path == None:
        for uncompiled_dep in uncompiled_sdk_module_info.deps:
            traverse_sdk_modules_graph(swift_sdk_module_name_to_deps, clang_sdk_module_name_to_deps, uncompiled_dep)
        return

    # return if dep is already in dict
    if uncompiled_sdk_module_info.is_swiftmodule and uncompiled_sdk_module_info.module_name in swift_sdk_module_name_to_deps:
        return
    elif not uncompiled_sdk_module_info.is_swiftmodule and uncompiled_sdk_module_info.module_name in clang_sdk_module_name_to_deps:
        return

    for uncompiled_dep in uncompiled_sdk_module_info.deps:
        traverse_sdk_modules_graph(swift_sdk_module_name_to_deps, clang_sdk_module_name_to_deps, uncompiled_dep)

    if uncompiled_sdk_module_info.is_swiftmodule:
        swift_sdk_module_name_to_deps[uncompiled_sdk_module_info.module_name] = sdk_module_dep
    else:
        clang_sdk_module_name_to_deps[uncompiled_sdk_module_info.module_name] = sdk_module_dep
def swift_toolchain_impl(ctx):
    """Rule implementation producing SwiftToolchainInfo for this toolchain."""

    # All Clang's PCMs need to be compiled with cxx flags of the target that imports them,
    # because of that, we expose `dependency`s of SDK modules,
    # which might be accessed from apple_library/apple_test rules and compiled there.
    uncompiled_swift_sdk_modules_deps = {}
    uncompiled_clang_sdk_modules_deps = {}
    for sdk_module_dep in ctx.attrs.sdk_modules:
        traverse_sdk_modules_graph(
            uncompiled_swift_sdk_modules_deps,
            uncompiled_clang_sdk_modules_deps,
            sdk_module_dep,
        )

    return [
        DefaultInfo(),
        SwiftToolchainInfo(
            architecture = ctx.attrs.architecture,
            can_toolchain_emit_obj_c_header_textually = ctx.attrs.can_toolchain_emit_obj_c_header_textually,
            # TODO(T99038725): until we add -debug-compilation-dir we need to wrap
            # the Swift invocations so that we can apply a debug prefix map for
            # the current directory while maintaining cache hit.
            uncompiled_swift_sdk_modules_deps = uncompiled_swift_sdk_modules_deps,
            uncompiled_clang_sdk_modules_deps = uncompiled_clang_sdk_modules_deps,
            compiler = cmd_args(ctx.attrs._swiftc_wrapper[RunInfo]).add(ctx.attrs.swiftc[RunInfo]),
            compiler_flags = ctx.attrs.swiftc_flags,
            prefix_serialized_debugging_options = ctx.attrs.prefix_serialized_debug_info,
            resource_dir = ctx.attrs.resource_dir,
            # An internal SDK path, when set, takes precedence over the public one.
            sdk_path = ctx.attrs._internal_sdk_path or ctx.attrs.sdk_path,
            swift_stdlib_tool = ctx.attrs.swift_stdlib_tool[RunInfo],
            swift_stdlib_tool_flags = ctx.attrs.swift_stdlib_tool_flags,
            runtime_run_paths = ctx.attrs.runtime_run_paths,
        ),
    ]

View File

@ -0,0 +1,54 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
#####################################################################
# Providers
# Describes a resolved Swift toolchain: compiler invocation, SDK location, and
# the uncompiled SDK module graphs consumers compile on demand.
SwiftToolchainInfo = provider(fields = [
    "architecture",
    "can_toolchain_emit_obj_c_header_textually",  # bool
    "uncompiled_swift_sdk_modules_deps",  # {str.type: dependency} Expose deps of uncompiled Swift SDK modules.
    "uncompiled_clang_sdk_modules_deps",  # {str.type: dependency} Expose deps of uncompiled Clang SDK modules.
    "compiler_flags",
    "compiler",
    "prefix_serialized_debugging_options",  # bool
    "resource_dir",  # "artifact",
    "sdk_path",
    "swift_stdlib_tool_flags",
    "swift_stdlib_tool",
    "runtime_run_paths",  # [str.type]
])

# A provider that represents a non-yet-compiled SDK (Swift or Clang) module,
# and doesn't contain any artifacts because Swift toolchain isn't resolved yet.
SdkUncompiledModuleInfo = provider(fields = [
    "name",  # A name of a module with `.swift`/`.clang` suffix.
    "module_name",  # A real name of a module, without distinguishing suffixes.
    "is_framework",  # This is mostly needed for the generated Swift module map file.
    "is_swiftmodule",  # If True then represents a swiftinterface, otherwise Clang's modulemap.
    "partial_cmd",  # Partial arguments, required to compile a particular SDK module.
    "input_relative_path",  # A relative prefixed path to a textual swiftinterface/modulemap file within an SDK.
    "deps",  # ["dependency"]
])

WrappedSdkCompiledModuleInfo = provider(fields = [
    "tset",  # A tset that contains SdkCompiledModuleInfo itself and its transitive deps
])

# A provider that represents an already-compiled SDK (Swift or Clang) module.
SdkCompiledModuleInfo = provider(fields = [
    "name",  # A name of a module with `.swift`/`.clang` suffix.
    "module_name",  # A real name of a module, without distinguishing suffixes.
    "is_swiftmodule",  # If True then contains a compiled swiftmodule, otherwise Clang's pcm.
    "is_framework",
    "output_artifact",  # Compiled artifact either swiftmodule or pcm.
    "input_relative_path",
])

SdkSwiftOverlayInfo = provider(fields = [
    "overlays",  # {str.type: [str.type]} A mapping providing a list of overlay module names for each underlying module
])

30
prelude/apple/tools/BUCK Normal file
View File

@ -0,0 +1,30 @@
# Build targets for the helper tools used by the Apple rules.
# Alias the native rules so this file does not itself depend on the prelude.
prelude = native

# Generates Clang module map files (see make_modulemap.py).
prelude.python_bootstrap_binary(
    name = "make_modulemap",
    main = "make_modulemap.py",
    visibility = ["PUBLIC"],
)

prelude.export_file(
    name = "swift_exec.sh",
    src = "swift_exec.sh",
)

# Wrapper used to invoke the Swift compiler (see swift_exec.sh).
prelude.command_alias(
    name = "swift_exec",
    exe = ":swift_exec.sh",
    visibility = ["PUBLIC"],
)

# Generates Clang VFS overlay JSON files (see make_vfsoverlay.py).
prelude.python_bootstrap_binary(
    name = "make_vfsoverlay",
    main = "make_vfsoverlay.py",
    visibility = ["PUBLIC"],
)

# Rewrites @import blocks in generated -Swift.h headers (see swift_objc_header_postprocess.py).
prelude.python_bootstrap_binary(
    name = "swift_objc_header_postprocess",
    main = "swift_objc_header_postprocess.py",
    visibility = ["PUBLIC"],
)

View File

@ -0,0 +1,154 @@
#!/usr/bin/env python3
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
import argparse
import os
import re
from io import TextIOWrapper
from typing import Dict, Iterable, List
class Module:
    """A node in a module tree that renders itself as a Clang module map entry."""

    def __init__(self, name: str) -> None:
        self.name: str = name
        self.headers: List[str] = []
        self.submodules: Dict[str, Module] = {}

    def add_header(self, src: str) -> None:
        """Registers a header path that belongs directly to this module."""
        self.headers.append(src)

    def get_submodule(self, name: str) -> "Module":
        """Returns the child module for `name`, creating it on first access."""
        if name not in self.submodules:
            self.submodules[name] = Module(name)
        return self.submodules[name]

    def render(self, f: TextIOWrapper, path_prefix: str, indent: int = 0) -> None:
        """Writes this module and its children (depth-first) to `f`."""
        outer = " " * indent
        inner = " " * (indent + 4)
        f.write(f"{outer}module {self.name} {{\n")

        taken_names = set()
        for key in sorted(self.submodules.keys()):
            child = self.submodules[key]
            # Drop the file extension for readability, then replace anything
            # that is not ASCII-alphanumeric or "_" to form a legal module name.
            candidate = re.sub(r"[^A-Za-z0-9_]", "_", os.path.splitext(key)[0])
            if candidate[0].isdigit():
                candidate = "_" + candidate
            # Append underscores until the name is unique among siblings.
            while candidate in taken_names:
                candidate += "_"
            taken_names.add(candidate)
            child.name = candidate
            child.render(f, path_prefix, indent + 4)

        for header in sorted(self.headers):
            f.write(f'{inner}header "{os.path.join(path_prefix, header)}"\n')
        if self.headers:
            f.write(f"{inner}export *\n")
        f.write(f"{outer}}}\n")
def _write_single_module(
    f: TextIOWrapper, name: str, headers: Iterable[str], path_prefix: str
) -> None:
    """Renders a single flat module containing every header."""
    root = Module(name)
    for header in headers:
        root.add_header(header)
    root.render(f, path_prefix)
def _write_submodules(
    f: TextIOWrapper, name: str, headers: Iterable[str], path_prefix: str
) -> None:
    """Renders a module tree with one nested submodule per path component."""
    root = Module(name)
    for header in headers:
        node = root
        for index, part in enumerate(header.split(os.sep)):
            # The common case is a single leading path component matching the
            # module name; such headers attach directly to the root module.
            if index == 0 and part == name:
                continue
            node = node.get_submodule(part)
        node.add_header(header)
    root.render(f, path_prefix)
def _write_swift_header(f: TextIOWrapper, name: str, swift_header_path: str) -> None:
f.write(
f"""
module {name}.Swift {{
header "{swift_header_path}"
requires objc
}}
"""
)
def main() -> None:
    """Writes a Clang module map for a set of headers rooted in a symlink tree."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--output", required=True, help="The path to write the modulemap to"
    )
    parser.add_argument("--name", required=True, help="The name of the module")
    parser.add_argument(
        "--swift-header", help="If this is a mixed module extend with this Swift header"
    )
    parser.add_argument(
        "--use-submodules",
        action="store_true",
        help="If set produce a modulemap with per-header submodules",
    )
    parser.add_argument(
        "--symlink-tree",
        required=True,
    )
    parser.add_argument(
        "mappings", nargs="*", default=[], help="A list of import paths"
    )
    args = parser.parse_args()

    # Header paths in the module map are written relative to the output's
    # directory so the generated file is relocatable.
    output_dir = os.path.dirname(args.output)
    path_prefix = os.path.relpath(args.symlink_tree, output_dir)

    with open(args.output, "w") as f:
        if args.use_submodules:
            # pyre-fixme[6]: For 4th param expected `str` but got `bytes`.
            _write_submodules(f, args.name, args.mappings, path_prefix)
        else:
            # pyre-fixme[6]: For 4th param expected `str` but got `bytes`.
            _write_single_module(f, args.name, args.mappings, path_prefix)

        # Mixed Swift/ObjC modules additionally expose the generated Swift header.
        if args.swift_header:
            swift_header_name = os.path.relpath(args.swift_header, output_dir)
            _write_swift_header(
                f,
                args.name,
                os.path.join(
                    "swift-extended_symlink_tree",
                    args.name,
                    str(swift_header_name),
                ),
            )


if __name__ == "__main__":
    main()

View File

@ -0,0 +1,95 @@
#!/usr/bin/env fbpython
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
import argparse
import itertools
import json
import os
from typing import Dict, List, Tuple, TypedDict
# Example VFS overlay in JSON format
# ----------------------------------
# {
# 'version': 0,
# 'roots': [
# { 'name': 'OUT_DIR', 'type': 'directory',
# 'contents': [
# { 'name': 'module.map', 'type': 'file',
# 'external-contents': 'INPUT_DIR/actual_module2.map'
# }
# ]
# }
# ]
# }
class OverlayRoot(TypedDict):
    """One 'directory' root entry of the VFS overlay JSON (see example above)."""

    name: str  # Virtual directory path.
    type: str  # Always "directory" for roots produced here.
    contents: List[Dict[str, str]]  # "file" entries mapping names to external-contents.
def main() -> None:
    """Parses dest/source path pairs from argv and writes the VFS overlay JSON."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--output", required=True, help="The path to write the VFS overlay to"
    )
    parser.add_argument(
        "mappings", nargs="*", default=[], help="A list of virtual paths to real paths"
    )
    args = parser.parse_args()

    if len(args.mappings) % 2 != 0:
        parser.error("mappings must be dest-source pairs")

    # Bucket each (virtual, real) pair by the virtual path's directory.
    grouped: Dict[str, List[Tuple[str, str]]] = {}
    for i in range(0, len(args.mappings), 2):
        virtual, real = args.mappings[i], args.mappings[i + 1]
        folder, basename = os.path.split(virtual)
        grouped.setdefault(folder, []).append((basename, real))

    with open(args.output, "w") as f:
        json.dump(
            {
                "version": 0,
                "roots": _get_roots(grouped),
            },
            f,
            sort_keys=True,
            indent=4,
        )
        f.write("\n")
        f.flush()
def _get_roots(mappings: Dict[str, List[Tuple[str, str]]]) -> List[OverlayRoot]:
    """Builds one 'directory' root per folder, listing its file mappings."""
    return [
        {
            "name": folder,
            "type": "directory",
            "contents": [
                {
                    "name": virtual_name,
                    "type": "file",
                    "external-contents": real_path,
                }
                for virtual_name, real_path in file_maps
            ],
        }
        for folder, file_maps in mappings.items()
    ]
if __name__ == "__main__":
main()

View File

@ -0,0 +1,61 @@
#!/bin/bash
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.

# Wrapper around the Swift compiler invocation ("$@"):
# - Apply a debug prefix map for the current directory
#   to make debug info relocatable.
# - Use $TMPDIR for the module cache location. This
#   will be set to a unique location for each RE action
#   which will avoid sharing modules across RE actions.
#   This is necessary as the inputs to the modules will
#   be transient and can be removed at any point, causing
#   module validation errors to fail builds.
# - Verify that every declared output file was actually produced.

set -e

if [ -n "$INSIDE_RE_WORKER" ]; then
  MODULE_CACHE_PATH="$TMPDIR/module-cache"
else
  # When building locally we can use a shared module
  # cache as the inputs should remain at a fixed
  # location.
  MODULE_CACHE_PATH="/tmp/buck-module-cache"
fi

# With explicit modules enabled there is no implicit module cache to configure.
module_cache_path_args=()
if [ -z "$EXPLICIT_MODULES_ENABLED" ]; then
  module_cache_path_args+=("-module-cache-path")
  module_cache_path_args+=("$MODULE_CACHE_PATH")
fi

"$@" -debug-prefix-map "$PWD"=. "${module_cache_path_args[@]}"

# Collect every path that follows an output-producing flag so we can check
# the compiler actually wrote them.
OUTPUT_PATHS=()
for ARG in "$@"
do
  if [ "${FOUND_OUTPUT_ARG}" = 1 ]; then
    OUTPUT_PATHS+=( "${ARG}" )
  fi

  FOUND_OUTPUT_ARG=0
  if [ "${ARG}" = "-o" ] || [ "${ARG}" = "-emit-module-path" ] || [ "${ARG}" = "-emit-objc-header-path" ]; then
    FOUND_OUTPUT_ARG=1
  fi
done

if [ ${#OUTPUT_PATHS[@]} -eq 0 ]; then
  >&2 echo "No output paths found, ensure output args are not passed in argfiles"
  exit 1
fi

for OUTPUT_PATH in "${OUTPUT_PATHS[@]}"
do
  # We've observed cases where the Swift compiler would return with a zero exit
  # code but would not have created the output file; correctly report a
  # non-zero exit code in such cases.
  if [ ! -f "${OUTPUT_PATH}" ]; then
    >&2 echo "Output file does not exist: '${OUTPUT_PATH}'"
    exit 1
  fi
done

View File

@ -0,0 +1,312 @@
#!/usr/bin/env fbpython
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
import argparse
import json
import os
import re
import sys
from typing import Dict, Iterable, TextIO
# Out-of-date? Update with this command:
#
# xcode-select --print-path | xargs printf '%s/Platforms/iPhoneSimulator.platform/Developer/SDKs/iPhoneSimulator.sdk/System/Library/Frameworks/' | xargs ls | rg '^([A-Z].+)\.framework$' -r '${1}' | xargs printf ' "%s",\n' && xcode-select --print-path | xargs printf '%s/Platforms/iPhoneSimulator.platform/Developer/SDKs/iPhoneSimulator.sdk/usr/include/module.modulemap' | xargs cat | rg '^module ([a-zA-Z0-9_]*) .*$' -r '${1}'| xargs printf ' "%s",\n'
APPLE_SYSTEM_MODULES = {
"ARKit",
"AVFAudio",
"AVFoundation",
"AVKit",
"Accelerate",
"Accessibility",
"Accounts",
"AdServices",
"AdSupport",
"AddressBook",
"AddressBookUI",
"AppClip",
"AppTrackingTransparency",
"AssetsLibrary",
"AudioToolbox",
"AudioUnit",
"AuthenticationServices",
"AutomaticAssessmentConfiguration",
"BackgroundTasks",
"BusinessChat",
"CFNetwork",
"CallKit",
"CarPlay",
"ClassKit",
"ClockKit",
"CloudKit",
"Combine",
"Contacts",
"ContactsUI",
"CoreAudio",
"CoreAudioKit",
"CoreAudioTypes",
"CoreBluetooth",
"CoreData",
"CoreFoundation",
"CoreGraphics",
"CoreHaptics",
"CoreImage",
"CoreLocation",
"CoreLocationUI",
"CoreMIDI",
"CoreML",
"CoreMedia",
"CoreMotion",
"CoreNFC",
"CoreServices",
"CoreSpotlight",
"CoreTelephony",
"CoreText",
"CoreVideo",
"CryptoKit",
"CryptoTokenKit",
"DataDetection",
"DeveloperToolsSupport",
"DeviceActivity",
"DeviceCheck",
"EventKit",
"EventKitUI",
"ExposureNotification",
"ExternalAccessory",
"FamilyControls",
"FileProvider",
"FileProviderUI",
"Foundation",
"GLKit",
"GSS",
"GameController",
"GameKit",
"GameplayKit",
"GroupActivities",
"HealthKit",
"HealthKitUI",
"HomeKit",
"IOKit",
"IOSurface",
"IdentityLookup",
"IdentityLookupUI",
"ImageCaptureCore",
"ImageIO",
"Intents",
"IntentsUI",
"JavaScriptCore",
"LinkPresentation",
"LocalAuthentication",
"ManagedSettings",
"ManagedSettingsUI",
"MapKit",
"MediaAccessibility",
"MediaPlayer",
"MediaToolbox",
"MessageUI",
"Messages",
"Metal",
"MetalKit",
"MetalPerformanceShaders",
"MetalPerformanceShadersGraph",
"MetricKit",
"MobileCoreServices",
"ModelIO",
"MultipeerConnectivity",
"MusicKit",
"NaturalLanguage",
"NearbyInteraction",
"Network",
"NetworkExtension",
"NewsstandKit",
"NotificationCenter",
"OSLog",
"OpenAL",
"OpenGLES",
"PDFKit",
"PHASE",
"PassKit",
"PencilKit",
"Photos",
"PhotosUI",
"PushKit",
"QuartzCore",
"QuickLook",
"QuickLookThumbnailing",
"RealityFoundation",
"RealityKit",
"ReplayKit",
"SafariServices",
"SceneKit",
"ScreenTime",
"Security",
"SensorKit",
"ShazamKit",
"Social",
"SoundAnalysis",
"Speech",
"SpriteKit",
"StoreKit",
"SwiftUI",
"SystemConfiguration",
"TabularData",
"Twitter",
"UIKit",
"UniformTypeIdentifiers",
"UserNotifications",
"UserNotificationsUI",
"VideoSubscriberAccount",
"VideoToolbox",
"Vision",
"VisionKit",
"WatchConnectivity",
"WebKit",
"WidgetKit",
"AppleTextureEncoder",
"Compression",
"Darwin",
"asl",
"dnssd",
"os",
"os_object",
"os_workgroup",
"libkern",
"notify",
"zlib",
"SQLite3",
}
# Apple frameworks that are only linked by test targets; treated the same as
# APPLE_SYSTEM_MODULES when mapping @import lines to textual imports below.
APPLE_TEST_FRAMEWORKS = {
    "XCTest",
}


# These modules require specific handling, as they do not have an umbrella
# header that matches the module name, as typical Apple frameworks do.
# Maps module name -> (import prefix, tuple of headers to import).
APPLE_SYSTEM_MODULE_OVERRIDES = {
    "Dispatch": ("dispatch", ("dispatch.h",)),
    "ObjectiveC": ("objc", ("runtime.h",)),
}
def write_imports_for_headers(out: TextIO, prefix: str, headers: Iterable[str]) -> None:
    """Emits one textual `#import <prefix/header>` line per header."""
    for header in headers:
        out.write(f"#import <{prefix}/{header}>\n")
def write_imports_for_modules(
    out: TextIO,
    postprocessing_module_name: str,
    modules: Iterable[str],
    deps: Dict[str, Iterable[str]],
) -> None:
    """Writes an `#else` branch of textual imports equivalent to the @import list.

    Each module resolves, in priority order, from: the `deps` mapping, the
    explicit system overrides, or the known Apple system/test module sets.
    Exits with an error for any module that cannot be mapped.
    """
    # We only include the traditional textual imports when modules are disabled, so
    # that the behavior with modules enabled is identical to the behavior without
    # the postprocessing.
    print("#else", file=out)
    for module in modules:
        if headers := deps.get(module):
            write_imports_for_headers(out, module, headers)
        elif override := APPLE_SYSTEM_MODULE_OVERRIDES.get(module):
            write_imports_for_headers(out, override[0], override[1])
        elif module in APPLE_SYSTEM_MODULES or module in APPLE_TEST_FRAMEWORKS:
            # When we don't have an explicit override for the module, we use the module's
            # name as an umbrella header. This is used for typical Apple frameworks like
            # Foundation and UIKit.
            write_imports_for_headers(out, module, (f"{module}.h",))
        else:
            print(
                f"""
The module "{module}" was imported as a dependency of Swift code in "{postprocessing_module_name}", but could not be mapped to a list of header imports by Buck's Swift header postprocessing. There are two possibilities:
1. If "{module}" is an internal library, it is likely that the exported_deps of "{postprocessing_module_name}" are incorrect. Try fixing them manually or with "arc fixmydeps". This is the most likely issue.
2. If "{module}" is a system (Apple) framework, the list of Apple system modules in {os.path.basename(__file__)} is out-of-date. There is a command to fix it in that file. This issue is unlikely.
""",
                file=sys.stderr,
            )
            sys.exit(1)
def main() -> None:
    """Rewrites a generated -Swift.h header, appending textual-import fallbacks."""
    parser = argparse.ArgumentParser()
    parser.add_argument("header")  # The generated -Swift.h header to postprocess.
    parser.add_argument("deps")  # JSON file mapping module names to header lists.
    parser.add_argument("out")  # Path to write the postprocessed header to.
    args = parser.parse_args()

    with open(args.deps) as f:
        deps = json.load(f)

    # Strips the suffix from the header name, leaving us with just the name
    # of the module that we are postprocessing the header for. This is used
    # for error reporting.
    postprocessing_module_name = os.path.basename(args.header).split("-")[0]

    # The Swift compiler's output looks like this for Swift5.8:
    #
    # #if __has_feature(objc_modules)
    # #if __has_warning("-Watimport-in-framework-header")
    # #pragma clang diagnostic ignored "-Watimport-in-framework-header"
    # #endif
    # @import ModuleA;
    # @import ModuleB;
    # @import ModuleC;
    # #endif
    #
    # The implementation here balances being somewhat flexible to changes to the compiler's
    # output, unlikely though they may be, with avoiding adding too much complexity and getting
    # too close to implementing a full parser for Objective-C un-preprocessed header files.

    with open(args.header) as header, open(args.out, "w") as out:
        # When this is None, it means that we are still searching for the start of the conditional
        # @import block in the generated header.
        modules = None

        # The Swift compiler emits an additional #if gate inside the conditional @import block, so
        # we need to track whether we're in a further nested conditional so that we know when the
        # main conditional block has ended.
        if_level = 0

        for line in header:
            line = line.rstrip("\n")
            # When the modules has not been set, we are still searching for the start of the
            # modules @import section.
            if modules is None:
                # The line changed from __has_feature(modules) to __has_feature(objc_modules) between Swift5.7 and Swift5.8.
                # For the time being, we need to check for either to support both Xcode14.2 and Xcode14.3 onwards.
                if (
                    line == "#if __has_feature(objc_modules)"
                    or line == "#if __has_feature(modules)"
                ):
                    modules = []
                    if_level = 1
            else:
                if line.startswith("@import"):
                    # Splitting on:
                    #   "@import ": to separate from the @import.
                    #   Semicolon and period: to separate the main module name from submodules or EOL.
                    # The module name will then be the first item.
                    modules.append(re.split(r"@import |[;.]", line)[1])
                elif line.startswith("#if"):
                    # This allows us to handle the Clang diagnostic #if block that the compiler inserts
                    # within the main #if block for modules.
                    if_level += 1
                elif line.startswith("#endif"):
                    if_level -= 1
                    # Closing the outermost #if ends the @import block: emit the
                    # textual-import #else branch before echoing the #endif.
                    if if_level == 0:
                        write_imports_for_modules(
                            out,
                            postprocessing_module_name,
                            modules,
                            deps,
                        )
                        modules = None
            # Every original line is passed through unchanged.
            print(line, file=out)


if __name__ == "__main__":
    main()

View File

@ -0,0 +1,52 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//apple:apple_bundle_resources.bzl", "get_apple_bundle_resource_part_list")
load("@prelude//apple:apple_bundle_types.bzl", "AppleBundleResourceInfo")
load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo", "AppleToolsInfo")
load("@prelude//user:rule_spec.bzl", "RuleRegistrationSpec")
load("@prelude//decls/ios_rules.bzl", "AppleBundleExtension")
load(":resource_group_map.bzl", "resource_group_map_attr")
def _get_apple_resources_toolchain_attr():
    """Returns the default toolchain_dep attr used for resource compilation."""

    # FIXME: prelude// should be standalone (not refer to fbcode//)
    return attrs.toolchain_dep(default = "fbcode//buck2/platform/toolchain:apple-resources", providers = [AppleToolchainInfo])
def _impl(ctx: "context") -> ["provider"]:
    # Build the bundle's resource part list and surface it via
    # AppleBundleResourceInfo so the owning `apple_bundle` can consume it.
    resource_info = AppleBundleResourceInfo(
        resource_output = get_apple_bundle_resource_part_list(ctx),
    )
    return [DefaultInfo(), resource_info]
# Registration for `apple_resource_bundle`: a proxy rule that mirrors the
# resource-related subset of `apple_bundle`'s attributes so resources can be
# compiled independently of the binary.
registration_spec = RuleRegistrationSpec(
    name = "apple_resource_bundle",
    impl = _impl,
    attrs = {
        "asset_catalogs_compilation_options": attrs.dict(key = attrs.string(), value = attrs.any(), default = {}),
        "binary": attrs.option(attrs.dep(), default = None),
        "deps": attrs.list(attrs.dep(), default = []),
        # Either a well-known bundle extension or a free-form string.
        "extension": attrs.one_of(attrs.enum(AppleBundleExtension), attrs.string()),
        "ibtool_flags": attrs.option(attrs.list(attrs.string()), default = None),
        "ibtool_module_flag": attrs.option(attrs.bool(), default = None),
        "info_plist": attrs.source(),
        "info_plist_substitutions": attrs.dict(key = attrs.string(), value = attrs.string(), sorted = False, default = {}),
        "product_name": attrs.option(attrs.string(), default = None),
        "resource_group": attrs.option(attrs.string(), default = None),
        "resource_group_map": resource_group_map_attr(),
        # Only include macOS hosted toolchains, so we compile resources directly on Mac RE
        "_apple_toolchain": _get_apple_resources_toolchain_attr(),
        # FIXME: prelude// should be standalone (not refer to fbsource//)
        "_apple_tools": attrs.exec_dep(default = "fbsource//xplat/buck2/platform/apple:apple-tools", providers = [AppleToolsInfo]),
        # Because `apple_resource_bundle` is a proxy for `apple_bundle`, we need to get `name`
        # field of the `apple_bundle`, as it's used as a fallback value in Info.plist.
        "_bundle_target_name": attrs.string(),
        "_compile_resources_locally_override": attrs.option(attrs.bool(), default = None),
    },
)

View File

@ -0,0 +1,119 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolsInfo")
load("@prelude//user:rule_spec.bzl", "RuleRegistrationSpec")
load(
"@prelude//utils:build_target_pattern.bzl",
"parse_build_target_pattern",
)
# Provider carrying the parsed selective-debugging configuration plus a
# `scrub_binary` callback used to strip debug info from binaries.
AppleSelectiveDebuggingInfo = provider(fields = [
    "scrub_binary",  # function(inner_ctx, executable) -> scrubbed artifact
    "include_build_target_patterns",  # [BuildTargetPattern.type] — parsed in _impl
    "include_regular_expressions",  # [regex] — parsed in _impl
    "exclude_build_target_patterns",  # [BuildTargetPattern.type] — parsed in _impl
    "exclude_regular_expressions",  # [regex] — parsed in _impl
])

# The type of selective debugging json input to utilize.
_SelectiveDebuggingJsonTypes = [
    # Use a targets json file containing all targets to include.
    "targets",
    # Use a spec json file specifying the targets to include
    # and exclude via build target patterns and regular expressions.
    "spec",
]

# Enum form of the list above, used to validate the `json_type` attribute.
_SelectiveDebuggingJsonType = enum(
    _SelectiveDebuggingJsonTypes[0],
    _SelectiveDebuggingJsonTypes[1],
)
def _impl(ctx: "context") -> ["provider"]:
    """Builds the selective-debugging scrubber command and exposes it as a provider."""
    mode = _SelectiveDebuggingJsonType(ctx.attrs.json_type)

    # Parse the raw string attrs into typed matchers once, up front, so that
    # consumers of the provider receive ready-to-use pattern/regex objects.
    included_patterns = [parse_build_target_pattern(p) for p in ctx.attrs.include_build_target_patterns]
    included_regexes = [experimental_regex(e) for e in ctx.attrs.include_regular_expressions]
    excluded_patterns = [parse_build_target_pattern(p) for p in ctx.attrs.exclude_build_target_patterns]
    excluded_regexes = [experimental_regex(e) for e in ctx.attrs.exclude_regular_expressions]

    base_cmd = cmd_args(ctx.attrs._apple_tools[AppleToolsInfo].selective_debugging_scrubber)
    if mode == _SelectiveDebuggingJsonType("targets"):
        # If a targets json file is not provided, write an empty json file:
        targets_file = ctx.attrs.targets_json_file
        if targets_file == None:
            targets_file = ctx.actions.write_json("targets_json.txt", {"targets": []})
        base_cmd.add("--targets-file")
        base_cmd.add(targets_file)
    elif mode == _SelectiveDebuggingJsonType("spec"):
        spec_file = ctx.actions.write_json("selective_debugging_spec.json", {
            "exclude_build_target_patterns": ctx.attrs.exclude_build_target_patterns,
            "exclude_regular_expressions": ctx.attrs.exclude_regular_expressions,
            "include_build_target_patterns": ctx.attrs.include_build_target_patterns,
            "include_regular_expressions": ctx.attrs.include_regular_expressions,
        })
        base_cmd.add("--spec-file")
        base_cmd.add(spec_file)
    else:
        fail("Expected json_type to be either `targets` or `spec`.")

    def scrub_binary(inner_ctx, executable: "artifact") -> "artifact":
        # Copy the base command per invocation so per-binary args don't leak
        # between calls.
        run_cmd = cmd_args(base_cmd)
        scrubbed = inner_ctx.actions.declare_output("debug_scrubbed/{}".format(executable.short_path))
        run_cmd.add(["--input", executable])
        run_cmd.add(["--output", scrubbed.as_output()])
        inner_ctx.actions.run(run_cmd, category = "scrub_binary", identifier = executable.short_path)
        return scrubbed

    return [
        DefaultInfo(),
        AppleSelectiveDebuggingInfo(
            scrub_binary = scrub_binary,
            include_build_target_patterns = included_patterns,
            include_regular_expressions = included_regexes,
            exclude_build_target_patterns = excluded_patterns,
            exclude_regular_expressions = excluded_regexes,
        ),
    ]
# Registration for `apple_selective_debugging`: a configuration rule whose
# provider tells Apple binary rules which targets' debug info to keep.
registration_spec = RuleRegistrationSpec(
    name = "apple_selective_debugging",
    impl = _impl,
    attrs = {
        "exclude_build_target_patterns": attrs.list(attrs.string(), default = []),
        "exclude_regular_expressions": attrs.list(attrs.string(), default = []),
        "include_build_target_patterns": attrs.list(attrs.string(), default = []),
        "include_regular_expressions": attrs.list(attrs.string(), default = []),
        # One of _SelectiveDebuggingJsonTypes: "targets" or "spec".
        "json_type": attrs.enum(_SelectiveDebuggingJsonTypes),
        # Only consulted in "targets" mode; an empty list is written otherwise.
        "targets_json_file": attrs.option(attrs.source(), default = None),
        "_apple_tools": attrs.exec_dep(default = "fbsource//xplat/buck2/platform/apple:apple-tools", providers = [AppleToolsInfo]),
    },
)
def filter_debug_info(debug_info: "transitive_set_iterator", selective_debugging_info: AppleSelectiveDebuggingInfo.type) -> ["artifact"]:
    """Returns the debug-info artifacts whose labels pass the include/exclude filters.

    A label is included when it matches the include patterns/regexes (or when no
    include filter is configured at all), and is not excluded by the exclude
    patterns/regexes.
    """
    # Whether an explicit include filter exists is loop-invariant; compute it
    # once instead of re-evaluating it for every transitive-set entry.
    has_include_filter = (
        len(selective_debugging_info.include_build_target_patterns) > 0 or
        len(selective_debugging_info.include_regular_expressions) > 0
    )
    selected_debug_info = []
    for info in debug_info:
        if has_include_filter:
            is_included = _check_if_label_matches_patterns_or_expressions(
                info.label,
                selective_debugging_info.include_build_target_patterns,
                selective_debugging_info.include_regular_expressions,
            )
        else:
            # No include filter configured: everything is a candidate.
            is_included = True
        if is_included and not _check_if_label_matches_patterns_or_expressions(
            info.label,
            selective_debugging_info.exclude_build_target_patterns,
            selective_debugging_info.exclude_regular_expressions,
        ):
            selected_debug_info.extend(info.artifacts)
    return selected_debug_info
def _check_if_label_matches_patterns_or_expressions(label: "label", patterns: ["BuildTargetPattern"], expressions: ["regex"]) -> bool.type:
    # A label matches when ANY build-target pattern or ANY regex matches it.
    if any([pattern.matches(label) for pattern in patterns]):
        return True
    # Stringify the label once and test it against every regex.
    label_str = str(label)
    return any([expression.match(label_str) for expression in expressions])

View File

@ -0,0 +1,53 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo")
load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo")
load("@prelude//user:rule_spec.bzl", "RuleRegistrationSpec")
def _impl(ctx: "context") -> ["provider"]:
    # Clone the `base` Apple toolchain, substituting only the C++ toolchain
    # when an override is supplied; every other field is passed through.
    base_toolchain = ctx.attrs.base[AppleToolchainInfo]
    overridden_cxx = ctx.attrs.cxx_toolchain[CxxToolchainInfo]
    effective_cxx = base_toolchain.cxx_toolchain_info
    if overridden_cxx != None:
        effective_cxx = overridden_cxx
    return [
        DefaultInfo(),
        AppleToolchainInfo(
            actool = base_toolchain.actool,
            codesign = base_toolchain.codesign,
            codesign_allocate = base_toolchain.codesign_allocate,
            compile_resources_locally = base_toolchain.compile_resources_locally,
            cxx_platform_info = base_toolchain.cxx_platform_info,
            cxx_toolchain_info = effective_cxx,
            dsymutil = base_toolchain.dsymutil,
            dwarfdump = base_toolchain.dwarfdump,
            ibtool = base_toolchain.ibtool,
            installer = base_toolchain.installer,
            libtool = base_toolchain.libtool,
            lipo = base_toolchain.lipo,
            min_version = base_toolchain.min_version,
            momc = base_toolchain.momc,
            platform_path = base_toolchain.platform_path,
            sdk_build_version = base_toolchain.sdk_build_version,
            sdk_name = base_toolchain.sdk_name,
            sdk_path = base_toolchain.sdk_path,
            sdk_version = base_toolchain.sdk_version,
            swift_toolchain_info = base_toolchain.swift_toolchain_info,
            watch_kit_stub_binary = base_toolchain.watch_kit_stub_binary,
            xcode_build_version = base_toolchain.xcode_build_version,
            xcode_version = base_toolchain.xcode_version,
            xctest = base_toolchain.xctest,
        ),
    ]
# Registration for `apple_toolchain_override`: a toolchain rule that wraps a
# base Apple toolchain and swaps in a different C++ toolchain.
registration_spec = RuleRegistrationSpec(
    name = "apple_toolchain_override",
    impl = _impl,
    attrs = {
        # The Apple toolchain whose fields are forwarded unchanged.
        "base": attrs.toolchain_dep(providers = [AppleToolchainInfo]),
        # The replacement C++ toolchain.
        "cxx_toolchain": attrs.toolchain_dep(providers = [CxxToolchainInfo]),
    },
    is_toolchain_rule = True,
)

View File

@ -0,0 +1,40 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolsInfo")
load("@prelude//user:rule_spec.bzl", "RuleRegistrationSpec")
def _impl(ctx: "context") -> ["provider"]:
    # Bundle each helper-tool dep's RunInfo into a single AppleToolsInfo provider.
    tool_attrs = ctx.attrs
    tools_info = AppleToolsInfo(
        assemble_bundle = tool_attrs.assemble_bundle[RunInfo],
        dry_codesign_tool = tool_attrs.dry_codesign_tool[RunInfo],
        info_plist_processor = tool_attrs.info_plist_processor[RunInfo],
        make_modulemap = tool_attrs.make_modulemap[RunInfo],
        make_vfsoverlay = tool_attrs.make_vfsoverlay[RunInfo],
        selective_debugging_scrubber = tool_attrs.selective_debugging_scrubber[RunInfo],
        swift_objc_header_postprocess = tool_attrs.swift_objc_header_postprocess[RunInfo],
    )
    return [DefaultInfo(), tools_info]
# The `apple_tools` rule exposes a set of supplementary tools
# required by the Apple rules _internally_. Such tools are not
# toolchain/SDK specific, they're just internal helper tools.
# Every attribute must provide RunInfo so _impl can extract a runnable command.
registration_spec = RuleRegistrationSpec(
    name = "apple_tools",
    impl = _impl,
    attrs = {
        "assemble_bundle": attrs.dep(providers = [RunInfo]),
        "dry_codesign_tool": attrs.dep(providers = [RunInfo]),
        "info_plist_processor": attrs.dep(providers = [RunInfo]),
        "make_modulemap": attrs.dep(providers = [RunInfo]),
        "make_vfsoverlay": attrs.dep(providers = [RunInfo]),
        "selective_debugging_scrubber": attrs.dep(providers = [RunInfo]),
        "swift_objc_header_postprocess": attrs.dep(providers = [RunInfo]),
    },
)

Some files were not shown because too many files have changed in this diff Show More