From ab462fb8d9601bce9f9f15c30a0e48fd4b7b281e Mon Sep 17 00:00:00 2001 From: Justin Carter Date: Mon, 23 Oct 2023 13:44:44 +0200 Subject: [PATCH] chore: bump flake (#3411) --- .github/labeler.yml | 3 + flake.lock | 25 +++++- flake.nix | 4 + .../android_binary_native_library_rules.bzl | 40 +++++---- prelude/android/android_providers.bzl | 6 +- prelude/android/tools/unpack_aar.py | 3 +- prelude/apple/apple_binary.bzl | 17 +++- prelude/apple/apple_bundle.bzl | 14 +++- prelude/apple/apple_bundle_macro_layer.bzl | 19 ----- prelude/apple/apple_bundle_part.bzl | 6 +- prelude/apple/apple_genrule_deps.bzl | 47 +++++++++++ prelude/apple/apple_library.bzl | 6 ++ prelude/apple/apple_macro_layer.bzl | 30 +++++++ prelude/apple/apple_resource_bundle.bzl | 3 +- prelude/apple/apple_rules_impl.bzl | 13 ++- prelude/apple/apple_rules_impl_utility.bzl | 13 ++- prelude/apple/apple_test_macro_layer.bzl | 25 ------ prelude/apple/swift/swift_compilation.bzl | 49 ++++++++++- prelude/apple/swift/swift_pcm_compilation.bzl | 2 +- .../apple/swift/swift_sdk_pcm_compilation.bzl | 3 +- .../swift_sdk_swiftinterface_compilation.bzl | 2 +- prelude/apple/swift/swift_toolchain.bzl | 1 + prelude/apple/swift/swift_toolchain_types.bzl | 1 + prelude/apple/tools/BUCK | 6 ++ prelude/apple/tools/make_swift_comp_db.py | 62 ++++++++++++++ prelude/csharp/csharp.bzl | 2 +- prelude/cxx/attr_selection.bzl | 2 +- prelude/cxx/compile.bzl | 28 +++++-- prelude/cxx/cxx.bzl | 4 +- prelude/cxx/cxx_bolt.bzl | 45 +---------- prelude/cxx/cxx_executable.bzl | 12 ++- prelude/cxx/cxx_library.bzl | 10 +-- prelude/cxx/cxx_toolchain.bzl | 4 +- prelude/cxx/cxx_types.bzl | 2 + prelude/cxx/link.bzl | 49 +++++++---- prelude/cxx/linker.bzl | 5 +- prelude/cxx/omnibus.bzl | 8 ++ prelude/cxx/shared_library_interface.bzl | 14 ++-- prelude/cxx/symbols.bzl | 15 ++-- prelude/debugging/fdb.bxl | 12 +-- prelude/debugging/inspect_dbg_exec.bzl | 2 +- prelude/debugging/inspect_default.bzl | 2 +- prelude/debugging/inspect_java.bzl | 2 +- 
prelude/decls/common.bzl | 6 +- prelude/decls/genrule_common.bzl | 3 +- .../test_exec/src/ct_daemon_hooks.erl | 81 +++++++++++++++---- prelude/genrule.bzl | 33 ++++++-- prelude/genrule_types.bzl | 12 +++ prelude/go/cgo_library.bzl | 21 ++++- prelude/go/compile.bzl | 4 +- prelude/go/go_library.bzl | 4 +- prelude/go/go_test.bzl | 2 + prelude/go/link.bzl | 23 ++++-- prelude/go/packages.bzl | 9 ++- prelude/go/toolchain.bzl | 15 ++-- prelude/go/tools/cgo_wrapper.py | 10 ++- prelude/haskell/haskell.bzl | 19 +++-- prelude/java/class_to_srcs.bzl | 15 ++-- prelude/julia/julia_test.bzl | 2 + prelude/linking/lto.bzl | 2 - prelude/linking/strip.bzl | 15 ++-- prelude/native.bzl | 4 +- prelude/ocaml/ocaml.bzl | 51 +++++++++--- prelude/python/cxx_python_extension.bzl | 2 +- prelude/python/make_py_package.bzl | 66 ++++++++------- prelude/python/python_binary.bzl | 4 +- prelude/python/sourcedb/build.bxl | 2 +- prelude/python/sourcedb/classic.bxl | 4 +- prelude/python/sourcedb/code_navigation.bxl | 4 +- prelude/python/sourcedb/merge.bxl | 2 +- prelude/python/sourcedb/query.bxl | 10 +-- .../python/tools/make_par/sitecustomize.py | 20 +++++ .../python/tools/make_py_package_inplace.py | 20 ++++- .../tools/make_py_package_manifest_module.py | 26 +++--- prelude/python/tools/run_inplace.py.in | 9 ++- prelude/python/tools/run_inplace_lite.py.in | 3 + prelude/rules_impl.bzl | 15 ++-- prelude/rust/build.bzl | 26 +++++- prelude/rust/rust-analyzer/resolve_deps.bxl | 1 + prelude/rust/rust_binary.bzl | 2 +- prelude/rust/rust_library.bzl | 20 ++--- prelude/rust/tools/buildscript_run.py | 43 +++++++--- prelude/rust/tools/failure_filter_action.py | 11 +-- prelude/test/inject_test_run_info.bzl | 4 +- prelude/toolchains/demo.bzl | 54 +++++++++++++ prelude/toolchains/haskell.bzl | 29 +++++++ prelude/toolchains/ocaml.bzl | 5 +- prelude/utils/graph_utils.bzl | 35 ++++---- vendir.lock.yml | 7 +- vendir.yml | 2 +- 90 files changed, 976 insertions(+), 399 deletions(-) delete mode 100644 
prelude/apple/apple_bundle_macro_layer.bzl create mode 100644 prelude/apple/apple_genrule_deps.bzl delete mode 100644 prelude/apple/apple_test_macro_layer.bzl create mode 100755 prelude/apple/tools/make_swift_comp_db.py create mode 100644 prelude/genrule_types.bzl create mode 100644 prelude/toolchains/demo.bzl create mode 100644 prelude/toolchains/haskell.bzl diff --git a/.github/labeler.yml b/.github/labeler.yml index 4f3afccbeb..088b63a961 100644 --- a/.github/labeler.yml +++ b/.github/labeler.yml @@ -4,12 +4,15 @@ ci: dashboard: - apps/dashboard/* + - flake.lock - pnpm-lock.yaml consent: - apps/consent/* + - flake.lock - pnpm-lock.yaml core: - core/**/* + - flake.lock - pnpm-lock.yaml diff --git a/flake.lock b/flake.lock index ec05d05d13..48bd2335db 100644 --- a/flake.lock +++ b/flake.lock @@ -20,11 +20,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1696757521, - "narHash": "sha256-cfgtLNCBLFx2qOzRLI6DHfqTdfWI+UbvsKYa3b3fvaA=", + "lastModified": 1697915759, + "narHash": "sha256-WyMj5jGcecD+KC8gEs+wFth1J1wjisZf8kVZH13f1Zo=", "owner": "nixos", "repo": "nixpkgs", - "rev": "2646b294a146df2781b1ca49092450e8a32814e1", + "rev": "51d906d2341c9e866e48c2efcaac0f2d70bfd43e", "type": "github" }, "original": { @@ -34,10 +34,27 @@ "type": "github" } }, + "nixpkgs-stable": { + "locked": { + "lastModified": 1697851979, + "narHash": "sha256-lJ8k4qkkwdvi+t/Xc6Fn74kUuobpu9ynPGxNZR6OwoA=", + "owner": "nixos", + "repo": "nixpkgs", + "rev": "5550a85a087c04ddcace7f892b0bdc9d8bb080c8", + "type": "github" + }, + "original": { + "owner": "nixos", + "ref": "nixos-23.05", + "repo": "nixpkgs", + "type": "github" + } + }, "root": { "inputs": { "flake-utils": "flake-utils", - "nixpkgs": "nixpkgs" + "nixpkgs": "nixpkgs", + "nixpkgs-stable": "nixpkgs-stable" } }, "systems": { diff --git a/flake.nix b/flake.nix index 562f87dde1..de89dba663 100644 --- a/flake.nix +++ b/flake.nix @@ -3,15 +3,18 @@ inputs = { nixpkgs.url = "github:nixos/nixpkgs/nixpkgs-unstable"; + nixpkgs-stable.url = 
"github:nixos/nixpkgs/nixos-23.05"; flake-utils.url = "github:numtide/flake-utils"; }; outputs = { self, nixpkgs, + nixpkgs-stable, flake-utils, }: flake-utils.lib.eachDefaultSystem (system: let + pkgsStable = import nixpkgs-stable { system = system; }; overlays = [ (self: super: { nodejs = super.nodejs_20; @@ -19,6 +22,7 @@ yarn = super.yarn.override { nodejs = super.nodejs_20; }; + tilt = pkgsStable.tilt; }) ]; pkgs = import nixpkgs {inherit overlays system;}; diff --git a/prelude/android/android_binary_native_library_rules.bzl b/prelude/android/android_binary_native_library_rules.bzl index 1187ff7b13..5015f96e1b 100644 --- a/prelude/android/android_binary_native_library_rules.bzl +++ b/prelude/android/android_binary_native_library_rules.bzl @@ -5,9 +5,6 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -# no support for "regex" type -# @starlark-rust: allow_string_literals_in_type_expr - load("@prelude//:paths.bzl", "paths") load( "@prelude//android:android_providers.bzl", @@ -60,7 +57,7 @@ load( "traverse_shared_library_info", ) load("@prelude//linking:strip.bzl", "strip_object") -load("@prelude//utils:graph_utils.bzl", "breadth_first_traversal_by", "post_order_traversal", "topo_sort", "topo_sort_by") +load("@prelude//utils:graph_utils.bzl", "breadth_first_traversal_by", "post_order_traversal", "pre_order_traversal", "pre_order_traversal_by") load("@prelude//utils:set.bzl", "set_type") # @unused Used as a type load("@prelude//utils:utils.bzl", "dedupe_by_value", "expect") @@ -175,7 +172,6 @@ def get_android_binary_native_library_info( native_merge_debug = ctx.actions.declare_output("native_merge.debug") dynamic_outputs.append(native_merge_debug) - if native_library_merge_sequence: # We serialize info about the linkable graph and the apk module mapping and pass that to an # external subcommand to apply a merge sequence algorithm and return us the merge mapping. 
for platform, deps in deps_by_platform.items(): @@ -184,7 +180,9 @@ def get_android_binary_native_library_info( linkables_debug = ctx.actions.write("linkables." + platform, list(graph_node_map.keys())) enhance_ctx.debug_output("linkables." + platform, linkables_debug) - flattened_linkable_graphs_by_platform[platform] = graph_node_map # _get_flattened_linkable_graph(ctx, graph_node_map) + flattened_linkable_graphs_by_platform[platform] = graph_node_map + + if native_library_merge_sequence: native_library_merge_input_file = ctx.actions.write_json("mergemap.input", { "linkable_graphs_by_platform": encode_linkable_graph_for_mergemap(flattened_linkable_graphs_by_platform), "native_library_merge_sequence": ctx.attrs.native_library_merge_sequence, @@ -201,8 +199,6 @@ def get_android_binary_native_library_info( enhance_ctx.debug_output("compute_merge_sequence", native_library_merge_dir) dynamic_inputs.append(native_library_merge_map) - elif has_native_merging: - flattened_linkable_graphs_by_platform = {platform: {} for platform in platform_to_original_native_linkables.keys()} mergemap_gencode_jar = None if has_native_merging and ctx.attrs.native_library_merge_code_generator: @@ -243,12 +239,12 @@ def get_android_binary_native_library_info( raw_target = str(target.raw_target()) merge_result = None for merge_lib, patterns in ctx.attrs.native_library_merge_map.items(): - if merge_result: - break for pattern in patterns: - if pattern.match(raw_target): + if regex(pattern).match(raw_target): merge_result = merge_lib break + if merge_result: + break if merge_result: merge_map[str(target)] = merge_result merge_map = ctx.actions.write_json("merge.map", merge_map_by_platform) @@ -260,7 +256,7 @@ def get_android_binary_native_library_info( ctx, { platform: LinkableMergeData( - glue_linkable = glue_linkables[platform], + glue_linkable = glue_linkables[platform] if glue_linkables else None, default_shared_libs = platform_to_original_native_linkables[platform], linkable_nodes = 
flattened_linkable_graphs_by_platform[platform], merge_map = merge_map_by_platform[platform], @@ -679,7 +675,7 @@ MergedLinkablesDebugInfo = record( # may be just one constituent) MergedSharedLibrary = record( soname = str, - lib = SharedLibrary.type, + lib = SharedLibrary, apk_module = str, # this only includes solib constituents that are included in the android merge map solib_constituents = list[str], @@ -691,7 +687,7 @@ MergedSharedLibrary = record( MergedLinkables = record( # dict[platform, dict[final_soname, MergedSharedLibrary]] shared_libs_by_platform = dict[str, dict[str, MergedSharedLibrary]], - debug_info = dict[str, MergedLinkablesDebugInfo.type], + debug_info = dict[str, MergedLinkablesDebugInfo], ) # Input data to the linkables merge process @@ -828,7 +824,7 @@ def _get_merged_linkables( linkable_nodes = merge_data.linkable_nodes linkable_nodes_graph = {k: dedupe(v.deps + v.exported_deps) for k, v in linkable_nodes.items()} - topo_sorted_targets = topo_sort(linkable_nodes_graph) + topo_sorted_targets = pre_order_traversal(linkable_nodes_graph) # first we collect basic information about each link group, this will populate the fields in LinkGroupData and # map target labels to their link group name. 
@@ -839,7 +835,7 @@ def _get_merged_linkables( expect(target not in target_to_link_group, "prelude internal error, target seen twice?") target_apk_module = merge_data.apk_module_graph(str(target.raw_target())) - link_group = merge_data.merge_map[str(target)] + link_group = merge_data.merge_map.get(str(target), None) if not link_group: link_group = str(target) link_groups[link_group] = LinkGroupData( @@ -938,8 +934,8 @@ def _get_merged_linkables( len(node_data.shared_libs) == 1, "unexpected shared_libs length for somerge of {} ({})".format(target, node_data.shared_libs), ) - expect(not node_data.deps, "prebuilt shared libs with deps not supported by somerge") - expect(not node_data.exported_deps, "prebuilt shared libs with exported_deps not supported by somerge") + expect(not node_data.deps, "prebuilt shared library `{}` with deps not supported by somerge".format(target)) + expect(not node_data.exported_deps, "prebuilt shared library `{}` with exported_deps not supported by somerge".format(target)) soname, shlib = node_data.shared_libs.items()[0] shared_lib = SharedLibrary( lib = shlib, @@ -1034,7 +1030,7 @@ def _get_merged_linkables( solib_constituents = [] link_group_deps = [] - ordered_group_constituents = topo_sort_by(group_data.constituents, get_merged_graph_traversal(group, False)) + ordered_group_constituents = pre_order_traversal_by(group_data.constituents, get_merged_graph_traversal(group, False)) representative_label = ordered_group_constituents[0] for key in ordered_group_constituents: real_constituents.append(key) @@ -1187,7 +1183,7 @@ def relink_libraries(ctx: AnalysisContext, libraries_by_platform: dict[str, dict shlib_graph[soname].append(dep) rev_shlib_graph.setdefault(dep, []).append(soname) needed_symbols_files = {} - for soname in topo_sort(shlib_graph): + for soname in pre_order_traversal(shlib_graph): if soname in unsupported_libs: relinked_libraries[soname] = shared_libraries[soname] continue @@ -1201,7 +1197,7 @@ def relink_libraries(ctx: 
AnalysisContext, libraries_by_platform: dict[str, dict create_relinker_version_script( ctx.actions, output = relinker_version_script, - relinker_blocklist = [experimental_regex(s) for s in ctx.attrs.relinker_whitelist], + relinker_blocklist = [regex(s) for s in ctx.attrs.relinker_whitelist], provided_symbols = provided_symbols_file, needed_symbols = needed_symbols_for_this, ) @@ -1229,7 +1225,7 @@ def relink_libraries(ctx: AnalysisContext, libraries_by_platform: dict[str, dict def extract_provided_symbols(ctx: AnalysisContext, toolchain: CxxToolchainInfo, lib: Artifact) -> Artifact: return extract_global_syms(ctx, toolchain, lib, "relinker_extract_provided_symbols") -def create_relinker_version_script(actions: AnalysisActions, relinker_blocklist: list["regex"], output: Artifact, provided_symbols: Artifact, needed_symbols: list[Artifact]): +def create_relinker_version_script(actions: AnalysisActions, relinker_blocklist: list[regex], output: Artifact, provided_symbols: Artifact, needed_symbols: list[Artifact]): def create_version_script(ctx, artifacts, outputs): all_needed_symbols = {} for symbols_file in needed_symbols: diff --git a/prelude/android/android_providers.bzl b/prelude/android/android_providers.bzl index 8dedbf46ea..8839bcab66 100644 --- a/prelude/android/android_providers.bzl +++ b/prelude/android/android_providers.bzl @@ -41,10 +41,10 @@ ExopackageResourcesInfo = record( ) AndroidBinaryNativeLibsInfo = record( - apk_under_test_prebuilt_native_library_dirs = list[PrebuiltNativeLibraryDir.type], + apk_under_test_prebuilt_native_library_dirs = list[PrebuiltNativeLibraryDir], # Indicates which shared lib producing targets are included in the binary. Used by instrumentation tests # to exclude those from the test apk. 
- apk_under_test_shared_libraries = list["target_label"], + apk_under_test_shared_libraries = list[TargetLabel], exopackage_info = ["ExopackageNativeInfo", None], root_module_native_lib_assets = list[Artifact], non_root_module_native_lib_assets = list[Artifact], @@ -54,7 +54,7 @@ AndroidBinaryNativeLibsInfo = record( AndroidBinaryResourcesInfo = record( # Optional information about resources that should be exopackaged - exopackage_info = [ExopackageResourcesInfo.type, None], + exopackage_info = [ExopackageResourcesInfo, None], # manifest to be used by the APK manifest = Artifact, # per-module manifests (packaged as assets) diff --git a/prelude/android/tools/unpack_aar.py b/prelude/android/tools/unpack_aar.py index 83c9212d65..d028a3966a 100644 --- a/prelude/android/tools/unpack_aar.py +++ b/prelude/android/tools/unpack_aar.py @@ -8,7 +8,6 @@ import argparse import pathlib -import shlex import shutil import zipfile from tempfile import TemporaryDirectory @@ -166,7 +165,7 @@ def main(): with open(jars_list, "w") as f: f.write("\n".join([str(jar) for jar in all_jars])) - combine_all_jars_cmd = shlex.split(jar_builder_tool) + [ + combine_all_jars_cmd = utils.shlex_split(jar_builder_tool) + [ "--entries-to-jar", jars_list, "--output", diff --git a/prelude/apple/apple_binary.bzl b/prelude/apple/apple_binary.bzl index ef7a2563e5..f9cfbbb84f 100644 --- a/prelude/apple/apple_binary.bzl +++ b/prelude/apple/apple_binary.bzl @@ -61,6 +61,7 @@ load(":apple_bundle_utility.bzl", "get_bundle_infos_from_graph", "merge_bundle_l load(":apple_code_signing_types.bzl", "AppleEntitlementsInfo") load(":apple_dsym.bzl", "DSYM_SUBTARGET", "get_apple_dsym") load(":apple_frameworks.bzl", "get_framework_search_path_flags") +load(":apple_genrule_deps.bzl", "get_apple_build_genrule_deps_attr_value", "get_apple_genrule_deps_outputs") load(":apple_sdk_metadata.bzl", "IPhoneSimulatorSdkMetadata", "MacOSXCatalystSdkMetadata") load(":apple_target_sdk_version.bzl", 
"get_min_deployment_version_for_node", "get_min_deployment_version_target_linker_flags", "get_min_deployment_version_target_preprocessor_flags") load(":apple_toolchain_types.bzl", "AppleToolchainInfo") @@ -108,11 +109,16 @@ def apple_binary_impl(ctx: AnalysisContext) -> [list[Provider], Promise]: swift_compile, ) + genrule_deps_outputs = [] + if get_apple_build_genrule_deps_attr_value(ctx): + genrule_deps_outputs = get_apple_genrule_deps_outputs(cxx_attr_deps(ctx)) + stripped = get_apple_stripped_attr_value_with_default_fallback(ctx) constructor_params = CxxRuleConstructorParams( rule_type = "apple_binary", headers_layout = get_apple_cxx_headers_layout(ctx), extra_link_flags = extra_link_flags, + extra_hidden = genrule_deps_outputs, srcs = cxx_srcs, additional = CxxRuleAdditionalParams( srcs = swift_srcs, @@ -122,6 +128,14 @@ def apple_binary_impl(ctx: AnalysisContext) -> [list[Provider], Promise]: # follow. static_external_debug_info = swift_debug_info.static, shared_external_debug_info = swift_debug_info.shared, + subtargets = { + "swift-compilation-database": [ + DefaultInfo( + default_output = swift_compile.compilation_database.db if swift_compile else None, + other_outputs = [swift_compile.compilation_database.other_outputs] if swift_compile else [], + ), + ], + }, ), extra_link_input = swift_object_files, extra_link_input_has_external_debug_info = True, @@ -144,9 +158,10 @@ def apple_binary_impl(ctx: AnalysisContext) -> [list[Provider], Promise]: unstripped_binary = None expect(unstripped_binary != None, "Expect to save unstripped_binary when stripped is enabled") unstripped_binary = cxx_output.unstripped_binary - cxx_output.sub_targets["unstripped"] = [DefaultInfo(default_output = unstripped_binary)] else: unstripped_binary = cxx_output.binary + cxx_output.sub_targets["unstripped"] = [DefaultInfo(default_output = unstripped_binary)] + dsym_artifact = get_apple_dsym( ctx = ctx, executable = unstripped_binary, diff --git a/prelude/apple/apple_bundle.bzl 
b/prelude/apple/apple_bundle.bzl index 4e89bc9d70..34921d2430 100644 --- a/prelude/apple/apple_bundle.bzl +++ b/prelude/apple/apple_bundle.bzl @@ -49,6 +49,7 @@ load(":apple_bundle_resources.bzl", "get_apple_bundle_resource_part_list", "get_ load(":apple_bundle_types.bzl", "AppleBinaryExtraOutputsInfo", "AppleBundleBinaryOutput", "AppleBundleExtraOutputsInfo", "AppleBundleInfo", "AppleBundleLinkerMapInfo", "AppleBundleResourceInfo") load(":apple_bundle_utility.bzl", "get_bundle_min_target_version", "get_default_binary_dep", "get_flattened_binary_deps", "get_product_name") load(":apple_dsym.bzl", "DSYM_INFO_SUBTARGET", "DSYM_SUBTARGET", "get_apple_dsym", "get_apple_dsym_ext", "get_apple_dsym_info") +load(":apple_genrule_deps.bzl", "get_apple_build_genrule_deps_attr_value", "get_apple_genrule_deps_outputs") load(":apple_sdk.bzl", "get_apple_sdk_name") load(":apple_universal_binaries.bzl", "create_universal_binary") load( @@ -294,7 +295,18 @@ def apple_bundle_impl(ctx: AnalysisContext) -> list[Provider]: primary_binary_rel_path = get_apple_bundle_part_relative_destination_path(ctx, primary_binary_part) - sub_targets = assemble_bundle(ctx, bundle, apple_bundle_part_list_output.parts, apple_bundle_part_list_output.info_plist_part, SwiftStdlibArguments(primary_binary_rel_path = primary_binary_rel_path)) + genrule_deps_outputs = [] + if get_apple_build_genrule_deps_attr_value(ctx): + genrule_deps_outputs = get_apple_genrule_deps_outputs(ctx.attrs.deps) + + sub_targets = assemble_bundle( + ctx, + bundle, + apple_bundle_part_list_output.parts, + apple_bundle_part_list_output.info_plist_part, + SwiftStdlibArguments(primary_binary_rel_path = primary_binary_rel_path), + genrule_deps_outputs, + ) sub_targets.update(aggregated_debug_info.sub_targets) primary_binary_path = cmd_args([bundle, primary_binary_rel_path], delimiter = "/") diff --git a/prelude/apple/apple_bundle_macro_layer.bzl b/prelude/apple/apple_bundle_macro_layer.bzl deleted file mode 100644 index 
3f8e98f7c6..0000000000 --- a/prelude/apple/apple_bundle_macro_layer.bzl +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under both the MIT license found in the -# LICENSE-MIT file in the root directory of this source tree and the Apache -# License, Version 2.0 found in the LICENSE-APACHE file in the root directory -# of this source tree. - -load(":apple_bundle_config.bzl", "apple_bundle_config") -load(":apple_info_plist_substitutions_parsing.bzl", "parse_codesign_entitlements") -load(":apple_resource_bundle.bzl", "make_resource_bundle_rule") - -def apple_bundle_macro_impl(apple_bundle_rule, apple_resource_bundle_rule, **kwargs): - info_plist_substitutions = kwargs.get("info_plist_substitutions") - kwargs.update(apple_bundle_config()) - apple_bundle_rule( - _codesign_entitlements = parse_codesign_entitlements(info_plist_substitutions), - _resource_bundle = make_resource_bundle_rule(apple_resource_bundle_rule, **kwargs), - **kwargs - ) diff --git a/prelude/apple/apple_bundle_part.bzl b/prelude/apple/apple_bundle_part.bzl index 9447cd8231..09e6f0b4bb 100644 --- a/prelude/apple/apple_bundle_part.bzl +++ b/prelude/apple/apple_bundle_part.bzl @@ -46,7 +46,8 @@ def assemble_bundle( bundle: Artifact, parts: list[AppleBundlePart], info_plist_part: [AppleBundlePart, None], - swift_stdlib_args: [SwiftStdlibArguments, None]) -> dict[str, list[Provider]]: + swift_stdlib_args: [SwiftStdlibArguments, None], + extra_hidden: list[Artifact] = []) -> dict[str, list[Provider]]: """ Returns extra subtargets related to bundling. 
""" @@ -177,6 +178,9 @@ def assemble_bundle( command.add(codesign_configuration_args) + # Ensures any genrule deps get built, such targets are used for validation + command.hidden(extra_hidden) + env = {} cache_buster = ctx.attrs._bundling_cache_buster if cache_buster: diff --git a/prelude/apple/apple_genrule_deps.bzl b/prelude/apple/apple_genrule_deps.bzl new file mode 100644 index 0000000000..756a5bdd3e --- /dev/null +++ b/prelude/apple/apple_genrule_deps.bzl @@ -0,0 +1,47 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load("@prelude//:genrule_types.bzl", "GENRULE_MARKER_SUBTARGET_NAME") + +def get_apple_genrule_deps_outputs(deps: list[Dependency]) -> list[Artifact]: + artifacts = [] + for dep in deps: + default_info = dep[DefaultInfo] + if GENRULE_MARKER_SUBTARGET_NAME in default_info.sub_targets: + artifacts += default_info.default_outputs + return artifacts + +def get_apple_build_genrule_deps_attr_value(ctx: AnalysisContext) -> bool: + build_genrule_deps = ctx.attrs.build_genrule_deps + if build_genrule_deps != None: + # `build_genrule_deps` present on a target takes priority + return build_genrule_deps + + # Fallback to the default value which is driven by buckconfig + select() + return ctx.attrs._build_genrule_deps + +def get_apple_build_genrule_deps_default_kwargs() -> dict[str, typing.Any]: + return { + APPLE_BUILD_GENRULE_DEPS_DEFAULT_ATTRIB_NAME: _build_genrule_deps_default_enabled(), + } + +def _build_genrule_deps_default_enabled() -> typing.Any: + buckconfig_value = read_root_config("apple", "build_genrule_deps", None) + if buckconfig_value != None: + return buckconfig_value.lower() == "true" + + return select({ + "DEFAULT": False, + # TODO(mgd): Make `config//` references possible from 
macro layer + "ovr_config//features/apple/constraints:build_genrule_deps_enabled": True, + }) + +APPLE_BUILD_GENRULE_DEPS_DEFAULT_ATTRIB_NAME = "_build_genrule_deps" +APPLE_BUILD_GENRULE_DEPS_DEFAULT_ATTRIB_TYPE = attrs.bool(default = False) + +APPLE_BUILD_GENRULE_DEPS_TARGET_ATTRIB_NAME = "build_genrule_deps" +APPLE_BUILD_GENRULE_DEPS_TARGET_ATTRIB_TYPE = attrs.option(attrs.bool(), default = None) diff --git a/prelude/apple/apple_library.bzl b/prelude/apple/apple_library.bzl index a1bc0b4bb4..b2d04862df 100644 --- a/prelude/apple/apple_library.bzl +++ b/prelude/apple/apple_library.bzl @@ -230,6 +230,12 @@ def apple_library_rule_constructor_params_and_swift_providers(ctx: AnalysisConte static_external_debug_info = swift_debug_info.static, shared_external_debug_info = swift_debug_info.shared, subtargets = { + "swift-compilation-database": [ + DefaultInfo( + default_output = swift_compile.compilation_database.db if swift_compile else None, + other_outputs = [swift_compile.compilation_database.other_outputs] if swift_compile else [], + ), + ], "swift-compile": [DefaultInfo(default_output = swift_compile.object_file if swift_compile else None)], }, additional_providers_factory = additional_providers_factory, diff --git a/prelude/apple/apple_macro_layer.bzl b/prelude/apple/apple_macro_layer.bzl index 11bcd47e2f..1349ed47b0 100644 --- a/prelude/apple/apple_macro_layer.bzl +++ b/prelude/apple/apple_macro_layer.bzl @@ -5,7 +5,11 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
+load(":apple_bundle_config.bzl", "apple_bundle_config") +load(":apple_genrule_deps.bzl", "get_apple_build_genrule_deps_default_kwargs") +load(":apple_info_plist_substitutions_parsing.bzl", "parse_codesign_entitlements") load(":apple_package_config.bzl", "apple_package_config") +load(":apple_resource_bundle.bzl", "make_resource_bundle_rule") load( ":apple_rules_impl_utility.bzl", "APPLE_ARCHIVE_OBJECTS_LOCALLY_OVERRIDE_ATTR_NAME", @@ -48,6 +52,10 @@ _APPLE_BINARY_LOCAL_EXECUTION_OVERRIDES = [ ), ] +_APPLE_TEST_LOCAL_EXECUTION_OVERRIDES = [ + APPLE_LINK_LIBRARIES_LOCALLY_OVERRIDE, +] + def apple_macro_layer_set_bool_override_attrs_from_config(overrides: list[AppleBuckConfigAttributeOverride]) -> dict[str, Select]: attribs = {} for override in overrides: @@ -63,6 +71,27 @@ def apple_macro_layer_set_bool_override_attrs_from_config(overrides: list[AppleB }) return attribs +def apple_test_macro_impl(apple_test_rule, apple_resource_bundle_rule, **kwargs): + kwargs.update(apple_bundle_config()) + kwargs.update(apple_macro_layer_set_bool_override_attrs_from_config(_APPLE_TEST_LOCAL_EXECUTION_OVERRIDES)) + + # `extension` is used both by `apple_test` and `apple_resource_bundle`, so provide default here + kwargs["extension"] = kwargs.pop("extension", "xctest") + apple_test_rule( + _resource_bundle = make_resource_bundle_rule(apple_resource_bundle_rule, **kwargs), + **kwargs + ) + +def apple_bundle_macro_impl(apple_bundle_rule, apple_resource_bundle_rule, **kwargs): + info_plist_substitutions = kwargs.get("info_plist_substitutions") + kwargs.update(apple_bundle_config()) + kwargs.update(get_apple_build_genrule_deps_default_kwargs()) + apple_bundle_rule( + _codesign_entitlements = parse_codesign_entitlements(info_plist_substitutions), + _resource_bundle = make_resource_bundle_rule(apple_resource_bundle_rule, **kwargs), + **kwargs + ) + def apple_library_macro_impl(apple_library_rule = None, **kwargs): 
kwargs.update(apple_macro_layer_set_bool_override_attrs_from_config(_APPLE_LIBRARY_LOCAL_EXECUTION_OVERRIDES)) kwargs.update(apple_macro_layer_set_bool_override_attrs_from_config([APPLE_STRIPPED_DEFAULT])) @@ -71,6 +100,7 @@ def apple_library_macro_impl(apple_library_rule = None, **kwargs): def apple_binary_macro_impl(apple_binary_rule = None, apple_universal_executable = None, **kwargs): kwargs.update(apple_macro_layer_set_bool_override_attrs_from_config(_APPLE_BINARY_LOCAL_EXECUTION_OVERRIDES)) kwargs.update(apple_macro_layer_set_bool_override_attrs_from_config([APPLE_STRIPPED_DEFAULT])) + kwargs.update(get_apple_build_genrule_deps_default_kwargs()) binary_name = kwargs.pop("name") diff --git a/prelude/apple/apple_resource_bundle.bzl b/prelude/apple/apple_resource_bundle.bzl index d32030728e..0ed45dfb4d 100644 --- a/prelude/apple/apple_resource_bundle.bzl +++ b/prelude/apple/apple_resource_bundle.bzl @@ -59,7 +59,8 @@ _RESOURCE_BUNDLE_FIELDS = [ def _is_resources_toolchain_enabled() -> bool: is_arvr_query_mode = read_root_config("fb", "arvr_query_mode") in ("True", "true") - if is_arvr_query_mode: + is_xplat_query_mode = read_root_config("mode", "is_xplat_mode_query") in ("True", "true") + if is_arvr_query_mode or is_xplat_query_mode: # Avoid returning buck2-only targets return False diff --git a/prelude/apple/apple_rules_impl.bzl b/prelude/apple/apple_rules_impl.bzl index 176636674b..6382893463 100644 --- a/prelude/apple/apple_rules_impl.bzl +++ b/prelude/apple/apple_rules_impl.bzl @@ -5,6 +5,13 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
+load( + "@prelude//apple:apple_genrule_deps.bzl", + "APPLE_BUILD_GENRULE_DEPS_DEFAULT_ATTRIB_NAME", + "APPLE_BUILD_GENRULE_DEPS_DEFAULT_ATTRIB_TYPE", + "APPLE_BUILD_GENRULE_DEPS_TARGET_ATTRIB_NAME", + "APPLE_BUILD_GENRULE_DEPS_TARGET_ATTRIB_TYPE", +) load("@prelude//apple/swift:swift_toolchain.bzl", "swift_toolchain_impl") load("@prelude//apple/swift:swift_toolchain_types.bzl", "SwiftObjectFormat") load("@prelude//apple/user:cpu_split_transition.bzl", "cpu_split_transition") @@ -27,6 +34,7 @@ load( "APPLE_ARCHIVE_OBJECTS_LOCALLY_OVERRIDE_ATTR_NAME", "apple_bundle_extra_attrs", "apple_test_extra_attrs", + "get_apple_bundle_toolchain_attr", "get_apple_toolchain_attr", "get_apple_xctoolchain_attr", "get_apple_xctoolchain_bundle_id_attr", @@ -88,6 +96,8 @@ extra_attributes = { "_apple_xctoolchain": get_apple_xctoolchain_attr(), "_apple_xctoolchain_bundle_id": get_apple_xctoolchain_bundle_id_attr(), "_stripped_default": attrs.bool(default = False), + APPLE_BUILD_GENRULE_DEPS_DEFAULT_ATTRIB_NAME: APPLE_BUILD_GENRULE_DEPS_DEFAULT_ATTRIB_TYPE, + APPLE_BUILD_GENRULE_DEPS_TARGET_ATTRIB_NAME: APPLE_BUILD_GENRULE_DEPS_TARGET_ATTRIB_TYPE, }, "apple_bundle": apple_bundle_extra_attrs(), "apple_library": { @@ -116,7 +126,7 @@ extra_attributes = { "packager": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), "packager_args": attrs.list(attrs.arg(), default = []), "validator": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), - "_apple_toolchain": _APPLE_TOOLCHAIN_ATTR, + "_apple_toolchain": get_apple_bundle_toolchain_attr(), # FIXME: prelude// should be standalone (not refer to fbsource//) "_apple_tools": attrs.exec_dep(default = "fbsource//xplat/buck2/platform/apple:apple-tools", providers = [AppleToolsInfo]), "_ipa_compression_level": attrs.enum(IpaCompressionLevel.values()), @@ -196,6 +206,7 @@ extra_attributes = { }, "swift_toolchain": { "architecture": attrs.option(attrs.string(), default = None), # TODO(T115173356): Make field 
non-optional + "make_swift_comp_db": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//apple/tools:make_swift_comp_db")), "object_format": attrs.enum(SwiftObjectFormat.values(), default = "object"), # A placeholder tool that can be used to set up toolchain constraints. # Useful when fat and thin toolchahins share the same underlying tools via `command_alias()`, diff --git a/prelude/apple/apple_rules_impl_utility.bzl b/prelude/apple/apple_rules_impl_utility.bzl index 0069e25bf4..8ab755f98f 100644 --- a/prelude/apple/apple_rules_impl_utility.bzl +++ b/prelude/apple/apple_rules_impl_utility.bzl @@ -8,6 +8,13 @@ load("@prelude//apple:apple_bundle_attrs.bzl", "get_apple_info_plist_build_system_identification_attrs") load("@prelude//apple:apple_bundle_types.bzl", "AppleBundleResourceInfo") load("@prelude//apple:apple_code_signing_types.bzl", "CodeSignType") +load( + "@prelude//apple:apple_genrule_deps.bzl", + "APPLE_BUILD_GENRULE_DEPS_DEFAULT_ATTRIB_NAME", + "APPLE_BUILD_GENRULE_DEPS_DEFAULT_ATTRIB_TYPE", + "APPLE_BUILD_GENRULE_DEPS_TARGET_ATTRIB_NAME", + "APPLE_BUILD_GENRULE_DEPS_TARGET_ATTRIB_TYPE", +) load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo", "AppleToolsInfo") load("@prelude//apple/user:apple_selective_debugging.bzl", "AppleSelectiveDebuggingInfo") load("@prelude//apple/user:cpu_split_transition.bzl", "cpu_split_transition") @@ -21,7 +28,7 @@ def get_apple_toolchain_attr(): # FIXME: prelude// should be standalone (not refer to fbcode//) return attrs.toolchain_dep(default = "fbcode//buck2/platform/toolchain:apple-default", providers = [AppleToolchainInfo]) -def _get_apple_bundle_toolchain_attr(): +def get_apple_bundle_toolchain_attr(): # FIXME: prelude// should be standalone (not refer to fbcode//) return attrs.toolchain_dep(default = "fbcode//buck2/platform/toolchain:apple-bundle", providers = [AppleToolchainInfo]) @@ -101,8 +108,10 @@ def apple_bundle_extra_attrs(): "selective_debugging": 
attrs.option(attrs.dep(providers = [AppleSelectiveDebuggingInfo]), default = None), "split_arch_dsym": attrs.bool(default = False), "universal": attrs.option(attrs.bool(), default = None), - "_apple_toolchain": _get_apple_bundle_toolchain_attr(), + "_apple_toolchain": get_apple_bundle_toolchain_attr(), "_codesign_entitlements": attrs.option(attrs.source(), default = None), + APPLE_BUILD_GENRULE_DEPS_DEFAULT_ATTRIB_NAME: APPLE_BUILD_GENRULE_DEPS_DEFAULT_ATTRIB_TYPE, + APPLE_BUILD_GENRULE_DEPS_TARGET_ATTRIB_NAME: APPLE_BUILD_GENRULE_DEPS_TARGET_ATTRIB_TYPE, } attribs.update(_apple_bundle_like_common_attrs()) return attribs diff --git a/prelude/apple/apple_test_macro_layer.bzl b/prelude/apple/apple_test_macro_layer.bzl deleted file mode 100644 index d4635c2d29..0000000000 --- a/prelude/apple/apple_test_macro_layer.bzl +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under both the MIT license found in the -# LICENSE-MIT file in the root directory of this source tree and the Apache -# License, Version 2.0 found in the LICENSE-APACHE file in the root directory -# of this source tree. 
- -load(":apple_bundle_config.bzl", "apple_bundle_config") -load(":apple_macro_layer.bzl", "APPLE_LINK_LIBRARIES_LOCALLY_OVERRIDE", "apple_macro_layer_set_bool_override_attrs_from_config") -load(":apple_resource_bundle.bzl", "make_resource_bundle_rule") - -_APPLE_TEST_LOCAL_EXECUTION_OVERRIDES = [ - APPLE_LINK_LIBRARIES_LOCALLY_OVERRIDE, -] - -def apple_test_macro_impl(apple_test_rule, apple_resource_bundle_rule, **kwargs): - kwargs.update(apple_bundle_config()) - kwargs.update(apple_macro_layer_set_bool_override_attrs_from_config(_APPLE_TEST_LOCAL_EXECUTION_OVERRIDES)) - - # `extension` is used both by `apple_test` and `apple_resource_bundle`, so provide default here - kwargs["extension"] = kwargs.pop("extension", "xctest") - apple_test_rule( - _resource_bundle = make_resource_bundle_rule(apple_resource_bundle_rule, **kwargs), - **kwargs - ) diff --git a/prelude/apple/swift/swift_compilation.bzl b/prelude/apple/swift/swift_compilation.bzl index cfe7bcfb8a..2738d9af99 100644 --- a/prelude/apple/swift/swift_compilation.bzl +++ b/prelude/apple/swift/swift_compilation.bzl @@ -72,6 +72,11 @@ SwiftDependencyInfo = provider(fields = { "exported_swiftmodules": provider_field(SwiftCompiledModuleTset), }) +SwiftCompilationDatabase = record( + db = field(Artifact), + other_outputs = field(ArgLike), +) + SwiftCompilationOutput = record( # The object file output from compilation. object_file = field(Artifact), @@ -92,6 +97,8 @@ SwiftCompilationOutput = record( swift_debug_info = field(ArtifactTSet), # A tset of PCM artifacts used to compile a Swift module. clang_debug_info = field(ArtifactTSet), + # Info required for `[swift-compilation-database]` subtarget. 
+ compilation_database = field(SwiftCompilationDatabase), ) SwiftDebugInfo = record( @@ -134,7 +141,10 @@ def get_swift_anonymous_targets(ctx: AnalysisContext, get_apple_library_provider ctx, direct_uncompiled_sdk_deps, ) - return ctx.actions.anon_targets(pcm_targets + sdk_pcm_targets + swift_interface_anon_targets).map(get_apple_library_providers) + return ctx.actions.anon_targets(pcm_targets + sdk_pcm_targets + swift_interface_anon_targets).promise.map(get_apple_library_providers) + +def _get_explicit_modules_forwards_warnings_as_errors() -> bool: + return read_root_config("swift", "explicit_modules_forwards_warnings_as_errors", "false").lower() == "true" def get_swift_cxx_flags(ctx: AnalysisContext) -> list[str]: """Iterates through `swift_compiler_flags` and returns a list of flags that might affect Clang compilation""" @@ -148,7 +158,11 @@ def get_swift_cxx_flags(ctx: AnalysisContext) -> list[str]: if next: gather.append("-Xcc") gather.append(str(f).replace('\"', "")) - next = str(f) == "\"-Xcc\"" + next = False + elif str(f) == "\"-Xcc\"": + next = True + elif _get_explicit_modules_forwards_warnings_as_errors() and str(f) == "\"-warnings-as-errors\"": + gather.append("-warnings-as-errors") if ctx.attrs.enable_cxx_interop: gather += ["-Xfrontend", "-enable-cxx-interop"] @@ -263,6 +277,7 @@ def compile_swift( argsfiles = argsfiles, swift_debug_info = extract_and_merge_swift_debug_infos(ctx, deps_providers, [output_swiftmodule]), clang_debug_info = extract_and_merge_clang_debug_infos(ctx, deps_providers), + compilation_database = _create_compilation_database(ctx, srcs, argsfiles.absolute[SWIFT_EXTENSION]), ) # Swift headers are postprocessed to make them compatible with Objective-C @@ -349,12 +364,12 @@ def _compile_with_argsfile( shell_quoted_args = cmd_args(shared_flags, quote = "shell") argsfile, _ = ctx.actions.write(extension + ".argsfile", shell_quoted_args, allow_args = True) input_args = [shared_flags] - cmd_form = cmd_args(argsfile, format = "@{}", 
delimiter = "").hidden(input_args) + cmd_form = cmd_args(cmd_args(argsfile, format = "@{}", delimiter = "")).hidden(input_args) + cmd_form.add([s.file for s in srcs]) cmd = cmd_args(toolchain.compiler) cmd.add(additional_flags) cmd.add(cmd_form) - cmd.add([s.file for s in srcs]) # If we prefer to execute locally (e.g., for perf reasons), ensure we upload to the cache, # so that CI builds populate caches used by developer machines. @@ -737,3 +752,29 @@ def get_swift_debug_infos( def _get_swift_shared_debug_info(swift_dependency_info: SwiftDependencyInfo) -> list[ArtifactTSet]: return [swift_dependency_info.debug_info_tset] if swift_dependency_info.debug_info_tset else [] + +def _get_project_root_file(ctx) -> Artifact: + content = cmd_args(ctx.label.project_root) + return ctx.actions.write("project_root_file", content, absolute = True) + +def _create_compilation_database( + ctx: AnalysisContext, + srcs: list[CxxSrcWithFlags], + argfile: CompileArgsfile) -> SwiftCompilationDatabase: + module_name = get_module_name(ctx) + + swift_toolchain = ctx.attrs._apple_toolchain[AppleToolchainInfo].swift_toolchain_info + mk_comp_db = swift_toolchain.mk_swift_comp_db[RunInfo] + + indentifier = module_name + ".swift_comp_db.json" + cdb_artifact = ctx.actions.declare_output(indentifier) + cmd = cmd_args(mk_comp_db) + cmd.add(cmd_args(cdb_artifact.as_output(), format = "--output={}")) + cmd.add(cmd_args(_get_project_root_file(ctx), format = "--project-root-file={}")) + cmd.add(["--files"] + [s.file for s in srcs]) + + cmd.add("--") + cmd.add(argfile.cmd_form) + ctx.actions.run(cmd, category = "swift_compilation_database", identifier = indentifier) + + return SwiftCompilationDatabase(db = cdb_artifact, other_outputs = argfile.cmd_form) diff --git a/prelude/apple/swift/swift_pcm_compilation.bzl b/prelude/apple/swift/swift_pcm_compilation.bzl index 1610a6ae65..01626fce7e 100644 --- a/prelude/apple/swift/swift_pcm_compilation.bzl +++ b/prelude/apple/swift/swift_pcm_compilation.bzl @@ 
-200,7 +200,7 @@ def _swift_pcm_compilation_impl(ctx: AnalysisContext) -> [Promise, list[Provider ctx.attrs.dep[SwiftPCMUncompiledInfo].exported_deps, ctx.attrs.swift_cxx_args, ) - return ctx.actions.anon_targets(sdk_pcm_deps_anon_targets + swift_pcm_anon_targets).map(k) + return ctx.actions.anon_targets(sdk_pcm_deps_anon_targets + swift_pcm_anon_targets).promise.map(k) _swift_pcm_compilation = rule( impl = _swift_pcm_compilation_impl, diff --git a/prelude/apple/swift/swift_sdk_pcm_compilation.bzl b/prelude/apple/swift/swift_sdk_pcm_compilation.bzl index 7c2ba9063c..911aa19c9d 100644 --- a/prelude/apple/swift/swift_sdk_pcm_compilation.bzl +++ b/prelude/apple/swift/swift_sdk_pcm_compilation.bzl @@ -221,7 +221,8 @@ def _swift_sdk_pcm_compilation_impl(ctx: AnalysisContext) -> [Promise, list[Prov ctx.attrs.dep[SdkUncompiledModuleInfo].deps, ctx.attrs.swift_cxx_args, ) - return ctx.actions.anon_targets(clang_module_deps).map(k) + + return ctx.actions.anon_targets(clang_module_deps).promise.map(k) _swift_sdk_pcm_compilation = rule( impl = _swift_sdk_pcm_compilation_impl, diff --git a/prelude/apple/swift/swift_sdk_swiftinterface_compilation.bzl b/prelude/apple/swift/swift_sdk_swiftinterface_compilation.bzl index 4158aa9f58..20667ddaf8 100644 --- a/prelude/apple/swift/swift_sdk_swiftinterface_compilation.bzl +++ b/prelude/apple/swift/swift_sdk_swiftinterface_compilation.bzl @@ -105,7 +105,7 @@ def _swift_interface_compilation_impl(ctx: AnalysisContext) -> [Promise, list[Pr # Compile the transitive swiftmodule deps. 
swift_module_deps = get_swift_interface_anon_targets(ctx, module_info.deps) - return ctx.actions.anon_targets(clang_module_deps + swift_module_deps).map(k) + return ctx.actions.anon_targets(clang_module_deps + swift_module_deps).promise.map(k) _swift_interface_compilation = rule( impl = _swift_interface_compilation_impl, diff --git a/prelude/apple/swift/swift_toolchain.bzl b/prelude/apple/swift/swift_toolchain.bzl index 9e7d4ef800..1496299f7c 100644 --- a/prelude/apple/swift/swift_toolchain.bzl +++ b/prelude/apple/swift/swift_toolchain.bzl @@ -71,5 +71,6 @@ def swift_toolchain_impl(ctx): runtime_run_paths = ctx.attrs.runtime_run_paths, supports_swift_cxx_interoperability_mode = ctx.attrs.supports_swift_cxx_interoperability_mode, supports_cxx_interop_requirement_at_import = ctx.attrs.supports_cxx_interop_requirement_at_import, + mk_swift_comp_db = ctx.attrs.make_swift_comp_db, ), ] diff --git a/prelude/apple/swift/swift_toolchain_types.bzl b/prelude/apple/swift/swift_toolchain_types.bzl index 6b0a5bc5e4..12676f535c 100644 --- a/prelude/apple/swift/swift_toolchain_types.bzl +++ b/prelude/apple/swift/swift_toolchain_types.bzl @@ -34,6 +34,7 @@ SwiftToolchainInfo = provider( "runtime_run_paths": provider_field(typing.Any, default = None), # [str] "supports_swift_cxx_interoperability_mode": provider_field(typing.Any, default = None), # bool "supports_cxx_interop_requirement_at_import": provider_field(typing.Any, default = None), # bool + "mk_swift_comp_db": provider_field(typing.Any, default = None), }, ) diff --git a/prelude/apple/tools/BUCK b/prelude/apple/tools/BUCK index 22077c630d..d49624186f 100644 --- a/prelude/apple/tools/BUCK +++ b/prelude/apple/tools/BUCK @@ -28,3 +28,9 @@ prelude.python_bootstrap_binary( main = "swift_objc_header_postprocess.py", visibility = ["PUBLIC"], ) + +prelude.python_bootstrap_binary( + name = "make_swift_comp_db", + main = "make_swift_comp_db.py", + visibility = ["PUBLIC"], +) diff --git a/prelude/apple/tools/make_swift_comp_db.py 
b/prelude/apple/tools/make_swift_comp_db.py new file mode 100755 index 0000000000..2a8dcb2984 --- /dev/null +++ b/prelude/apple/tools/make_swift_comp_db.py @@ -0,0 +1,62 @@ +#!/usr/bin/env python3 +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +""" +Utility to create Swift's compilation DBs + +$ make_swift_comp_db.py gen --output=entry.json foo.swift -- -I /path/ -Xcc -fno-implicit-modules +""" + +# pyre-unsafe + +import argparse +import json +import shlex +import sys + + +def gen(args): + + with open(args.project_root_file, "r") as project_root_file: + project_root = project_root_file.read().replace("\n", "") + + entry = {} + entry["files"] = list(args.files) + entry["directory"] = project_root + + arguments = [] + for arg in args.arguments: + if arg.startswith("@"): + with open(arg[1:]) as argsfile: + for line in argsfile: + # The argsfile's arguments are separated by newlines; we + # don't want those included in the argument list. 
+ arguments.append(" ".join(shlex.split(line))) + else: + arguments.append(arg) + entry["arguments"] = arguments + + json.dump([entry], args.output, indent=2) + args.output.close() + + +def main(argv): + parser = argparse.ArgumentParser() + + parser.add_argument("--output", type=argparse.FileType("w"), default=sys.stdout) + parser.add_argument("--files", nargs="*") + # A path to a file that contains project root + parser.add_argument("--project-root-file") + parser.add_argument("arguments", nargs="*") + + args = parser.parse_args(argv[1:]) + + gen(args) + + +sys.exit(main(sys.argv)) diff --git a/prelude/csharp/csharp.bzl b/prelude/csharp/csharp.bzl index 90abfe7e26..da2d4c6bd7 100644 --- a/prelude/csharp/csharp.bzl +++ b/prelude/csharp/csharp.bzl @@ -10,7 +10,7 @@ load(":toolchain.bzl", "CSharpToolchainInfo") # Describes either a reference to a Buck .NET target or a .NET framework DLL. DllReference = record( # `str` -> Path to a .NET framework DLL on the local machine. - # `Artifacft` -> Buck target dependency. + # `Artifact` -> Buck target dependency. 
reference = field([Artifact, str]), ) diff --git a/prelude/cxx/attr_selection.bzl b/prelude/cxx/attr_selection.bzl index 08b011642c..020040d8d5 100644 --- a/prelude/cxx/attr_selection.bzl +++ b/prelude/cxx/attr_selection.bzl @@ -53,7 +53,7 @@ def cxx_by_language_ext(x: dict[typing.Any, typing.Any], ext: str) -> list[typin elif ext in (".asm", ".asmpp"): key_pp = "asm_with_cpp" key_compiler = "asm" - elif ext in (".h", ".hpp"): + elif ext in (".h", ".hpp", ".hh", ".hxx", ".h++"): fail("Not allowed to have header files in the `srcs` attribute - put them in `headers`") else: fail("Unexpected file extension: " + ext) diff --git a/prelude/cxx/compile.bzl b/prelude/cxx/compile.bzl index 322ced4302..b10d69fb1b 100644 --- a/prelude/cxx/compile.bzl +++ b/prelude/cxx/compile.bzl @@ -58,6 +58,7 @@ CxxExtension = enum( ".hpp", ".hh", ".h++", + ".hxx", ) # File types for dep files @@ -140,6 +141,11 @@ CxxCompileOutput = record( clang_trace = field([Artifact, None], None), ) +_ABSOLUTE_ARGSFILE_SUBSTITUTIONS = [ + (regex("-filter-error=.+"), "-fcolor-diagnostics"), + (regex("-filter-ignore=.+"), "-fcolor-diagnostics"), +] + def create_compile_cmds( ctx: AnalysisContext, # TODO(nga): this is `CxxRuleConstructorParams`, @@ -364,12 +370,13 @@ def compile_cxx( ) # If we're building with split debugging, where the debug info is in the - # original object, then add the object as external debug info, *unless* - # we're doing LTO, which generates debug info at link time (*except* for - # fat LTO, which still generates native code and, therefore, debug info). + # original object, then add the object as external debug info + # FIXME: ThinLTO generates debug info in a separate dwo dir, but we still + # need to track object files if the object file is not compiled to bitcode. + # We should track whether ThinLTO is used on a per-object basis rather than + # globally on a toolchain level. 
object_has_external_debug_info = ( - toolchain.split_debug_mode == SplitDebugMode("single") and - linker_info.lto_mode in (LtoMode("none"), LtoMode("fat")) + toolchain.split_debug_mode == SplitDebugMode("single") ) # .S extension is native assembly code (machine level, processor specific) @@ -405,7 +412,7 @@ def _validate_target_headers(ctx: AnalysisContext, preprocessor: list[CPreproces def _get_compiler_info(toolchain: CxxToolchainInfo, ext: CxxExtension) -> typing.Any: compiler_info = None - if ext.value in (".cpp", ".cc", ".mm", ".cxx", ".c++", ".h", ".hpp"): + if ext.value in (".cpp", ".cc", ".mm", ".cxx", ".c++", ".h", ".hpp", ".hh", ".h++", ".hxx"): compiler_info = toolchain.cxx_compiler_info elif ext.value in (".c", ".m"): compiler_info = toolchain.c_compiler_info @@ -427,7 +434,7 @@ def _get_compiler_info(toolchain: CxxToolchainInfo, ext: CxxExtension) -> typing return compiler_info def _get_category(ext: CxxExtension) -> str: - if ext.value in (".cpp", ".cc", ".cxx", ".c++", ".h", ".hpp"): + if ext.value in (".cpp", ".cc", ".cxx", ".c++", ".h", ".hpp", ".hh", ".h++", ".hxx"): return "cxx_compile" if ext.value == ".c": return "c_compile" @@ -464,7 +471,7 @@ def _dep_file_type(ext: CxxExtension) -> [DepFileType, None]: return None # Return the file type aswell - if ext.value in (".cpp", ".cc", ".mm", ".cxx", ".c++", ".h", ".hpp"): + if ext.value in (".cpp", ".cc", ".mm", ".cxx", ".c++", ".h", ".hpp", ".hh", ".h++", ".hxx"): return DepFileType("cpp") elif ext.value in (".c", ".m"): return DepFileType("c") @@ -528,6 +535,11 @@ def _mk_argsfile( # to avoid "argument too long" errors if use_absolute_paths: args.add(cmd_args(preprocessor.set.project_as_args("abs_file_prefix_args"))) + + # HACK: Replace Xcode clang incompatible flags with compatible ones. + # TODO: Refactor this to be a true Xcode argsfile generating flow. 
+ for re, sub in _ABSOLUTE_ARGSFILE_SUBSTITUTIONS: + args.replace_regex(re, sub) else: args.add(headers_tag.tag_artifacts(cmd_args(preprocessor.set.project_as_args("file_prefix_args")))) diff --git a/prelude/cxx/cxx.bzl b/prelude/cxx/cxx.bzl index caca6ed27b..ab63aa9d77 100644 --- a/prelude/cxx/cxx.bzl +++ b/prelude/cxx/cxx.bzl @@ -158,7 +158,7 @@ def _get_shared_link_style_sub_targets_and_providers( if output.dwp != None: sub_targets["dwp"] = [DefaultInfo(default_output = output.dwp)] if output.pdb != None: - sub_targets[PDB_SUB_TARGET] = get_pdb_providers(output.pdb) + sub_targets[PDB_SUB_TARGET] = get_pdb_providers(pdb = output.pdb, binary = output.default) cxx_toolchain = get_cxx_toolchain_info(ctx) if cxx_toolchain.dumpbin_toolchain_path != None: sub_targets[DUMPBIN_SUB_TARGET] = get_dumpbin_providers(ctx, output.default, cxx_toolchain.dumpbin_toolchain_path) @@ -477,7 +477,7 @@ def prebuilt_cxx_library_impl(ctx: AnalysisContext) -> list[Provider]: sub_targets["soname-lib"] = [DefaultInfo(default_output = soname_lib)] if shared_lib.pdb: - sub_targets[PDB_SUB_TARGET] = get_pdb_providers(shared_lib.pdb) + sub_targets[PDB_SUB_TARGET] = get_pdb_providers(pdb = shared_lib.pdb, binary = shared_lib.output) dumpbin_toolchain_path = get_cxx_toolchain_info(ctx).dumpbin_toolchain_path if dumpbin_toolchain_path != None: sub_targets[DUMPBIN_SUB_TARGET] = get_dumpbin_providers(ctx, shared_lib.output, dumpbin_toolchain_path) diff --git a/prelude/cxx/cxx_bolt.bzl b/prelude/cxx/cxx_bolt.bzl index 8928120017..271ab5e804 100644 --- a/prelude/cxx/cxx_bolt.bzl +++ b/prelude/cxx/cxx_bolt.bzl @@ -8,54 +8,14 @@ # BOLT (Binary Optimization Layout Tool) is a post link profile guided optimizer used for # performance-critical services in fbcode: https://www.internalfb.com/intern/wiki/HHVM-BOLT/ -load("@prelude//:local_only.bzl", "link_cxx_binary_locally") load(":cxx_context.bzl", "get_cxx_toolchain_info") def cxx_use_bolt(ctx: AnalysisContext) -> bool: cxx_toolchain_info = 
get_cxx_toolchain_info(ctx) return cxx_toolchain_info.bolt_enabled and ctx.attrs.bolt_profile != None -def bolt_gdb_index(ctx: AnalysisContext, bolt_output: Artifact, identifier: [str, None]) -> Artifact: - # Run gdb-indexer - # gdb-indexer -o - gdb_index_output_name = bolt_output.short_path.removesuffix("-pre_gdb_index") + "-gdb_index" - gdb_index_output = ctx.actions.declare_output(gdb_index_output_name) - gdb_index_args = cmd_args( - ctx.attrs.bolt_gdb_index, - bolt_output, - "-o", - gdb_index_output.as_output(), - ) - ctx.actions.run( - gdb_index_args, - category = "gdb_index", - identifier = identifier, - local_only = link_cxx_binary_locally(ctx), - ) - - # Run objcopy - # objcopy -R .gdb_index --add-section=.gdb_index= - objcopy_output_name = gdb_index_output_name.removesuffix("-gdb_index") - objcopy_output = ctx.actions.declare_output(objcopy_output_name) - objcopy_args = cmd_args( - get_cxx_toolchain_info(ctx).binary_utilities_info.objcopy, - "-R", - ".gdb_index", - cmd_args(gdb_index_output, format = "--add-section=.gdb_index={}"), - bolt_output, - objcopy_output.as_output(), - ) - ctx.actions.run( - objcopy_args, - category = "objcopy", - identifier = identifier, - local_only = link_cxx_binary_locally(ctx), - ) - - return objcopy_output - def bolt(ctx: AnalysisContext, prebolt_output: Artifact, identifier: [str, None]) -> Artifact: - output_name = prebolt_output.short_path.removesuffix("-wrapper") + ("-pre_gdb_index" if (ctx.attrs.bolt_gdb_index != None) else "") + output_name = prebolt_output.short_path.removesuffix("-wrapper") postbolt_output = ctx.actions.declare_output(output_name) bolt_msdk = get_cxx_toolchain_info(ctx).binary_utilities_info.bolt_msdk @@ -81,7 +41,4 @@ def bolt(ctx: AnalysisContext, prebolt_output: Artifact, identifier: [str, None] local_only = get_cxx_toolchain_info(ctx).linker_info.link_binaries_locally, ) - if ctx.attrs.bolt_gdb_index != None: - return bolt_gdb_index(ctx, postbolt_output, identifier) - return postbolt_output diff 
--git a/prelude/cxx/cxx_executable.bzl b/prelude/cxx/cxx_executable.bzl index aedf575e7d..6d260310f0 100644 --- a/prelude/cxx/cxx_executable.bzl +++ b/prelude/cxx/cxx_executable.bzl @@ -472,6 +472,13 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, dep_links, ] + impl_params.extra_link_args + # If there are hidden dependencies to this target then add them as + # hidden link args. + if impl_params.extra_hidden: + links.append( + LinkArgs(flags = cmd_args().hidden(impl_params.extra_hidden)), + ) + link_result = _link_into_executable( ctx, # If shlib lib tree generation is enabled, pass in the shared libs (which @@ -594,7 +601,7 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, if binary.pdb: # A `pdb` sub-target which generates the `.pdb` file for this binary. - sub_targets[PDB_SUB_TARGET] = get_pdb_providers(binary.pdb) + sub_targets[PDB_SUB_TARGET] = get_pdb_providers(pdb = binary.pdb, binary = binary.output) if toolchain_info.dumpbin_toolchain_path: sub_targets[DUMPBIN_SUB_TARGET] = get_dumpbin_providers(ctx, binary.output, toolchain_info.dumpbin_toolchain_path) @@ -645,6 +652,9 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, default_output = materialize_external_debug_info, )] + for additional_subtarget, subtarget_providers in impl_params.additional.subtargets.items(): + sub_targets[additional_subtarget] = subtarget_providers + return CxxExecutableOutput( binary = binary.output, unstripped_binary = binary.unstripped_output, diff --git a/prelude/cxx/cxx_library.bzl b/prelude/cxx/cxx_library.bzl index a8f5837ccf..b163c34175 100644 --- a/prelude/cxx/cxx_library.bzl +++ b/prelude/cxx/cxx_library.bzl @@ -806,10 +806,6 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc pass default_output = unknown() - if default_output != None and default_output.unstripped != None: - sub_targets["unstripped"] = [DefaultInfo( - default_outputs = 
[default_output.unstripped], - )] default_info = DefaultInfo( default_output = default_output.default if default_output != None else None, other_outputs = default_output.other if default_output != None else [], @@ -1028,10 +1024,11 @@ def _form_library_outputs( link_cmd_debug_output_file = make_link_command_debug_output_json_info(ctx, [link_cmd_debug_output]) providers.append(LinkCommandDebugOutputInfo(debug_outputs = [link_cmd_debug_output])) + unstripped = shlib.unstripped_output output = CxxLibraryOutput( output_style = LibOutputStyle("shared_lib"), default = shlib.output, - unstripped = shlib.unstripped_output, + unstripped = unstripped, object_files = compiled_srcs.pic.objects, external_debug_info = shlib.external_debug_info, dwp = shlib.dwp, @@ -1046,6 +1043,9 @@ def _form_library_outputs( "linker.filelist": [DefaultInfo( default_outputs = filter(None, [shlib.linker_filelist]), )], + "unstripped": [DefaultInfo( + default_output = unstripped, + )], }, pdb = shlib.pdb, implib = shlib.import_library, diff --git a/prelude/cxx/cxx_toolchain.bzl b/prelude/cxx/cxx_toolchain.bzl index 6dbc3a91c8..bc4d4771a9 100644 --- a/prelude/cxx/cxx_toolchain.bzl +++ b/prelude/cxx/cxx_toolchain.bzl @@ -212,8 +212,8 @@ def cxx_toolchain_extra_attributes(is_toolchain_rule): # to fail, so I need a DEFAULT here when some target without cpu constraint tries to configure against the # windows exec platform. 
"DEFAULT": None, - "ovr_config//cpu:x86_32": "fbsource//arvr/third-party/toolchains/visual_studio:14.28.29910-cl_32_and_tools", - "ovr_config//cpu:x86_64": "fbsource//arvr/third-party/toolchains/visual_studio:14.28.29910-cl_64_and_tools", + "ovr_config//cpu:x86_32": "fbsource//arvr/third-party/toolchains/visual_studio:cl_x86_and_tools", + "ovr_config//cpu:x86_64": "fbsource//arvr/third-party/toolchains/visual_studio:cl_x64_and_tools", }), }) if is_full_meta_repo() else None)), "_mk_comp_db": attrs.default_only(dep_type(providers = [RunInfo], default = "prelude//cxx/tools:make_comp_db")), diff --git a/prelude/cxx/cxx_types.bzl b/prelude/cxx/cxx_types.bzl index a0dfa96af9..9d4a914bf1 100644 --- a/prelude/cxx/cxx_types.bzl +++ b/prelude/cxx/cxx_types.bzl @@ -119,6 +119,8 @@ CxxRuleConstructorParams = record( # Additional information used to link every object produced by the rule, # flags are _both_ exported and used to link the target itself. extra_exported_link_flags = field(list[typing.Any], []), + # Additional hidden inputs for link or archive actions. + extra_hidden = field(list[Artifact], []), # Additional flags used _only_ when linking the target itself. # These flags are _not_ propagated up the dep tree. extra_link_flags = field(list[typing.Any], []), diff --git a/prelude/cxx/link.bzl b/prelude/cxx/link.bzl index d38d6fabf6..572e960849 100644 --- a/prelude/cxx/link.bzl +++ b/prelude/cxx/link.bzl @@ -312,6 +312,14 @@ _AnonLinkInfo = provider(fields = { "result": provider_field(typing.Any, default = None), # CxxLinkResult }) +# dwp and split_debug_output are optional outputs, but promise artifacts require an actual artifact +# when being resolved. Let's add some placeholders here so that we always generate an artifact when +# applying the map functions. 
+_AnonLinkInfoPlaceholder = provider(fields = { + "dwp": provider_field(typing.Any), + "split_debug_output": provider_field(typing.Any), +}) + def _anon_link_impl(ctx): (output, result_type, opts) = deserialize_anon_attrs(ctx.actions, ctx.label, ctx.attrs) @@ -322,13 +330,32 @@ def _anon_link_impl(ctx): opts = opts, ) - return [DefaultInfo(), _AnonLinkInfo(result = link_result)] + dwp_placeholder = ctx.actions.write("placeholder_dwp", "") + split_debug_output_placeholder = ctx.actions.write("placeholder_split_debug_output", "") + + return [ + DefaultInfo(), + _AnonLinkInfo(result = link_result), + _AnonLinkInfoPlaceholder(dwp = dwp_placeholder, split_debug_output = split_debug_output_placeholder), + ] -_anon_link_rule = rule( +_anon_link_rule = anon_rule( impl = _anon_link_impl, attrs = ANON_ATTRS, + artifact_promise_mappings = { + "dwp": lambda p: _get_link_artifact(p, "dwp"), + "output": lambda p: p[_AnonLinkInfo].result.linked_object.output, + "split_debug_output": lambda p: _get_link_artifact(p, "split_debug_output"), + }, ) +def _get_link_artifact(p: ProviderCollection, name: str) -> Artifact: + linked_object = p[_AnonLinkInfo].result.linked_object + if getattr(linked_object, name) != None: + return getattr(linked_object, name) + else: + return getattr(p[_AnonLinkInfoPlaceholder], name) + def _anon_cxx_link( ctx: AnalysisContext, output: str, @@ -337,7 +364,7 @@ def _anon_cxx_link( if opts.cxx_toolchain: fail("anon link requires getting toolchain from ctx.attrs._cxx_toolchain") cxx_toolchain = ctx.attrs._cxx_toolchain[CxxToolchainInfo] - anon_providers = ctx.actions.anon_target( + anon_link_target = ctx.actions.anon_target( _anon_link_rule, dict( _cxx_toolchain = ctx.attrs._cxx_toolchain, @@ -349,22 +376,16 @@ def _anon_cxx_link( ), ) - output = ctx.actions.artifact_promise( - anon_providers.map(lambda p: p[_AnonLinkInfo].result.linked_object.output), - short_path = output, - ) - dwp = None if dwp_available(cxx_toolchain): - dwp = ctx.actions.artifact_promise( 
- anon_providers.map(lambda p: p[_AnonLinkInfo].result.linked_object.dwp), - ) + dwp = anon_link_target.artifact("dwp") split_debug_output = None if generates_split_debug(cxx_toolchain): - split_debug_output = ctx.actions.artifact_promise( - anon_providers.map(lambda p: p[_AnonLinkInfo].result.linked_object.split_debug_output), - ) + split_debug_output = anon_link_target.artifact("split_debug_output") + + output = ctx.actions.assert_short_path(anon_link_target.artifact("output"), short_path = output) + external_debug_info = link_external_debug_info( ctx = ctx, links = opts.links, diff --git a/prelude/cxx/linker.bzl b/prelude/cxx/linker.bzl index c53acb1391..1d1143a183 100644 --- a/prelude/cxx/linker.bzl +++ b/prelude/cxx/linker.bzl @@ -282,8 +282,9 @@ def is_pdb_generated( return False def get_pdb_providers( - pdb: Artifact): - return [DefaultInfo(default_output = pdb)] + pdb: Artifact, + binary: Artifact): + return [DefaultInfo(default_output = pdb, other_outputs = [binary])] DUMPBIN_SUB_TARGET = "dumpbin" diff --git a/prelude/cxx/omnibus.bzl b/prelude/cxx/omnibus.bzl index db93c9162f..1a8970e4ed 100644 --- a/prelude/cxx/omnibus.bzl +++ b/prelude/cxx/omnibus.bzl @@ -676,6 +676,14 @@ def create_omnibus_libraries( spec = _build_omnibus_spec(ctx, graph) pic_behavior = get_cxx_toolchain_info(ctx).pic_behavior + if not allow_cache_upload: + # Gradually enable allow_cache_upload everywhere + h = hash(str(ctx.label)) + if h < 0: + h = -h + if h % 100 < 20: + allow_cache_upload = True + # Create dummy omnibus dummy_omnibus = create_dummy_omnibus(ctx, extra_ldflags) diff --git a/prelude/cxx/shared_library_interface.bzl b/prelude/cxx/shared_library_interface.bzl index 8b335b2b78..3ac819b044 100644 --- a/prelude/cxx/shared_library_interface.bzl +++ b/prelude/cxx/shared_library_interface.bzl @@ -43,7 +43,7 @@ def _anon_shared_library_interface_impl(ctx): return [DefaultInfo(), _InterfaceInfo(artifact = output)] # Anonymous wrapper for `extract_symbol_names`. 
-_anon_shared_library_interface = rule( +_anon_shared_library_interface = anon_rule( impl = _anon_shared_library_interface_impl, attrs = { "identifier": attrs.option(attrs.string(), default = None), @@ -51,6 +51,9 @@ _anon_shared_library_interface = rule( "shared_lib": attrs.source(), "_cxx_toolchain": attrs.dep(providers = [CxxToolchainInfo]), }, + artifact_promise_mappings = { + "shared_library_interface": lambda p: p[_InterfaceInfo].artifact, + }, ) def shared_library_interface( @@ -60,7 +63,7 @@ def shared_library_interface( output = paths.join("__shlib_intfs__", shared_lib.short_path) if anonymous: - anon_providers = ctx.actions.anon_target( + shared_lib_interface_artifact = ctx.actions.anon_target( _anon_shared_library_interface, dict( _cxx_toolchain = ctx.attrs._cxx_toolchain, @@ -68,11 +71,8 @@ def shared_library_interface( shared_lib = shared_lib, identifier = shared_lib.short_path, ), - ) - return ctx.actions.artifact_promise( - anon_providers.map(lambda p: p[_InterfaceInfo].artifact), - short_path = output, - ) + ).artifact("shared_library_interface") + return ctx.actions.assert_short_path(shared_lib_interface_artifact, short_path = output) else: return _shared_library_interface( ctx = ctx, diff --git a/prelude/cxx/symbols.bzl b/prelude/cxx/symbols.bzl index 70947e108c..0e93f1b638 100644 --- a/prelude/cxx/symbols.bzl +++ b/prelude/cxx/symbols.bzl @@ -99,7 +99,7 @@ def _anon_extract_symbol_names_impl(ctx): return [DefaultInfo(), _SymbolsInfo(artifact = output)] # Anonymous wrapper for `extract_symbol_names`. 
-_anon_extract_symbol_names_impl_rule = rule( +_anon_extract_symbol_names_impl_rule = anon_rule( impl = _anon_extract_symbol_names_impl, attrs = { "allow_cache_upload": attrs.bool(default = False), @@ -114,6 +114,9 @@ _anon_extract_symbol_names_impl_rule = rule( "undefined_only": attrs.bool(default = False), "_cxx_toolchain": attrs.dep(providers = [CxxToolchainInfo]), }, + artifact_promise_mappings = { + "symbols": lambda p: p[_SymbolsInfo].artifact, + }, ) def extract_symbol_names( @@ -131,18 +134,16 @@ def extract_symbol_names( cxx_toolchain_from_attrs = ctx.attrs._cxx_toolchain[CxxToolchainInfo] if cxx_toolchain != cxx_toolchain_from_attrs: fail("anon symbol extraction requires that the cxx_toolchain be from the _cxx_toolchain attr") - anon_providers = ctx.actions.anon_target( + artifact = ctx.actions.anon_target( _anon_extract_symbol_names_impl_rule, dict( _cxx_toolchain = ctx.attrs._cxx_toolchain, output = name, **kwargs ), - ) - return ctx.actions.artifact_promise( - anon_providers.map(lambda p: p[_SymbolsInfo].artifact), - short_path = paths.join("__symbols__", name), - ) + ).artifact("symbols") + + return ctx.actions.assert_short_path(artifact, short_path = paths.join("__symbols__", name)) else: return _extract_symbol_names( ctx = ctx, diff --git a/prelude/debugging/fdb.bxl b/prelude/debugging/fdb.bxl index bee32b041a..05a8b1c97e 100644 --- a/prelude/debugging/fdb.bxl +++ b/prelude/debugging/fdb.bxl @@ -27,7 +27,7 @@ load("@prelude//debugging/inspect_java.bzl", "inspect_java_rule") load("@prelude//debugging/labels.bzl", "DBG_INFO_EXEC", "DBG_INFO_REF", "get_info_ref", "get_label_or_mark") load("@prelude//debugging/types.bzl", "ScriptSettings") -def inspect_alias_rule(ctx: "bxl_ctx", actions: AnalysisActions, target: "target_node", settings: ScriptSettings): +def inspect_alias_rule(ctx: bxl.Context, actions: AnalysisActions, target: "target_node", settings: ScriptSettings): attrs = target.attrs_lazy() actual = attrs.get("actual") return 
inspect_any_target(ctx, actions, ctx.configured_targets(actual.value().configured_target()), settings) @@ -40,13 +40,15 @@ INSPECT_BY_RULE = { "prelude//rules.bzl:android_library": inspect_java_rule, "prelude//rules.bzl:configured_alias": inspect_alias_rule, "prelude//rules.bzl:java_binary": inspect_java_rule, + "prelude//rules.bzl:java_library": inspect_java_rule, "prelude//rules.bzl:java_test": inspect_java_rule, "prelude//rules.bzl:kotlin_binary": inspect_java_rule, + "prelude//rules.bzl:kotlin_library": inspect_java_rule, "prelude//rules.bzl:kotlin_test": inspect_java_rule, "prelude//rules.bzl:robolectric_test": inspect_java_rule, } -def inspect_info_ref_rule(ctx: "bxl_ctx", actions: AnalysisActions, target: "target_node", settings: ScriptSettings): +def inspect_info_ref_rule(ctx: bxl.Context, actions: AnalysisActions, target: "target_node", settings: ScriptSettings): aliased_target_label = get_info_ref(target.attrs_lazy().get("labels").value()) if not aliased_target_label: return inspect_default( @@ -69,7 +71,7 @@ INSPECT_BY_LABEL = { DBG_INFO_EXEC: inspect_dbg_exec, } -def inspect_any_target(ctx: "bxl_ctx", actions: AnalysisActions, target: "target_node", settings: ScriptSettings): +def inspect_any_target(ctx: bxl.Context, actions: AnalysisActions, target: "target_node", settings: ScriptSettings): attrs = target.attrs_lazy() labels = attrs.get("labels").value() if attrs.get("labels") else [] inspect_func = INSPECT_BY_RULE.get(rule_type(target), inspect_default) @@ -78,7 +80,7 @@ def inspect_any_target(ctx: "bxl_ctx", actions: AnalysisActions, target: "target return inspect_func(ctx, actions, target, settings) -def inspect(ctx: "bxl_ctx", actions: AnalysisActions, target: "target_node", settings: ScriptSettings): +def inspect(ctx: bxl.Context, actions: AnalysisActions, target: "target_node", settings: ScriptSettings): result = inspect_any_target(ctx, actions, target, settings) # when getting ExecInfo based on external action it's not possible to provide result 
as ExecInfo @@ -87,7 +89,7 @@ def inspect(ctx: "bxl_ctx", actions: AnalysisActions, target: "target_node", set return result return actions.write_json("out.json", result) -def inspect_target_impl(ctx: "bxl_ctx"): +def inspect_target_impl(ctx: bxl.Context): actions = ctx.bxl_actions().actions node = ctx.configured_targets(ctx.cli_args.target) ctx.output.print(ctx.output.ensure(inspect(ctx, actions, node, ScriptSettings( diff --git a/prelude/debugging/inspect_dbg_exec.bzl b/prelude/debugging/inspect_dbg_exec.bzl index c43df50a71..64f53e3be9 100644 --- a/prelude/debugging/inspect_dbg_exec.bzl +++ b/prelude/debugging/inspect_dbg_exec.bzl @@ -11,7 +11,7 @@ load("@prelude//debugging/common.bzl", "create_target_info", "target_name") load("@prelude//debugging/types.bzl", "JavaInfo", "ScriptSettings") load("@prelude//java/class_to_srcs.bzl", "JavaClassToSourceMapInfo") -def inspect_dbg_exec(ctx: "bxl_ctx", actions: AnalysisActions, target: "target_node", settings: ScriptSettings): +def inspect_dbg_exec(ctx: bxl.Context, actions: AnalysisActions, target: "target_node", settings: ScriptSettings): pointer_name = target_name(target) if not pointer_name.endswith("_fdb"): pointer_name = "{}_fdb".format(pointer_name) diff --git a/prelude/debugging/inspect_default.bzl b/prelude/debugging/inspect_default.bzl index 625154de85..479c0b5e35 100644 --- a/prelude/debugging/inspect_default.bzl +++ b/prelude/debugging/inspect_default.bzl @@ -12,7 +12,7 @@ load("@prelude//debugging/types.bzl", "ExecInfo", "ScriptSettings") # "inspect_default" is reused across "fdb.bxl" to provide a fallback default information # in case special handling for the rule type isn't implemented yet -def inspect_default(_ctx: "bxl_ctx", _actions: AnalysisActions, _target: "target_node", settings: ScriptSettings) -> ExecInfo: +def inspect_default(_ctx: bxl.Context, _actions: AnalysisActions, _target: "target_node", settings: ScriptSettings) -> ExecInfo: return ExecInfo( target_name = target_name(settings.target), 
target_info = create_target_info(settings.target), diff --git a/prelude/debugging/inspect_java.bzl b/prelude/debugging/inspect_java.bzl index 9b0f06d34a..a1a23036d7 100644 --- a/prelude/debugging/inspect_java.bzl +++ b/prelude/debugging/inspect_java.bzl @@ -11,7 +11,7 @@ load("@prelude//debugging/common.bzl", "create_target_info", "target_name") load("@prelude//debugging/types.bzl", "ExecInfo", "JavaInfo", "ScriptSettings", "TargetExtraInfo") load("@prelude//java/class_to_srcs.bzl", "JavaClassToSourceMapInfo") -def inspect_java_rule(ctx: "bxl_ctx", _actions: AnalysisActions, target: "target_node", settings: ScriptSettings) -> ExecInfo: +def inspect_java_rule(ctx: bxl.Context, _actions: AnalysisActions, target: "target_node", settings: ScriptSettings) -> ExecInfo: providers = ctx.analysis(target).providers() debuginfo = providers[JavaClassToSourceMapInfo].debuginfo if JavaClassToSourceMapInfo in providers else None if debuginfo: diff --git a/prelude/decls/common.bzl b/prelude/decls/common.bzl index 710d566b68..2ca044d8eb 100644 --- a/prelude/decls/common.bzl +++ b/prelude/decls/common.bzl @@ -11,6 +11,7 @@ # well-formatted (and then delete this TODO) load("@prelude//:build_mode.bzl", "BuildModeInfo") +load("@prelude//:is_full_meta_repo.bzl", "is_full_meta_repo") def validate_uri(_s): return True @@ -235,7 +236,10 @@ def _re_opts_for_tests_arg() -> Attr: ) def _re_action_key_provider_arg() -> Attr: - return attrs.dep(providers = [BuildModeInfo], default = "fbcode//buck2/platform/build_mode:build_mode") + if is_full_meta_repo(): + return attrs.dep(providers = [BuildModeInfo], default = "fbcode//buck2/platform/build_mode:build_mode") + else: + return attrs.option(attrs.dep(providers = [BuildModeInfo]), default = None) def _re_test_args() -> dict[str, Attr]: return { diff --git a/prelude/decls/genrule_common.bzl b/prelude/decls/genrule_common.bzl index 7bb6137476..0538c1ec46 100644 --- a/prelude/decls/genrule_common.bzl +++ b/prelude/decls/genrule_common.bzl @@ -64,8 
+64,7 @@ def _cmd_arg(): The output file or directory for the `genrule()`. This variable will have whatever value is specified by - the `out` argument if not using\302\240 - named outputs + the `out` argument if not using named outputs. If using named outputs, this variable will be the output directory. diff --git a/prelude/erlang/common_test/test_exec/src/ct_daemon_hooks.erl b/prelude/erlang/common_test/test_exec/src/ct_daemon_hooks.erl index c3dd61b459..8a8ef43993 100644 --- a/prelude/erlang/common_test/test_exec/src/ct_daemon_hooks.erl +++ b/prelude/erlang/common_test/test_exec/src/ct_daemon_hooks.erl @@ -119,19 +119,31 @@ handle_cast(_Request, _State) -> -spec initialize_hooks() -> state(). initialize_hooks() -> ConfiguredHooks = get_hooks_config(), - NormalizedConfiguredHooks = [{get_hook_module(Hook), get_hook_opts(Hook)} || Hook <- ConfiguredHooks], + NormalizedConfiguredHooks = [ + {get_hook_module(Hook), get_hook_opts(Hook), get_hook_priority(Hook)} + || Hook <- ConfiguredHooks + ], %% first we need the Id - HooksWithId = [{wrapped_id(Mod, Opts), Mod, Opts} || {Mod, Opts} <- NormalizedConfiguredHooks], + HooksWithId = [ + case Prio of + undefined -> {0, wrapped_id(Mod, Opts), Mod, Opts, Prio}; + _ -> {Prio, wrapped_id(Mod, Opts), Mod, Opts, Prio} + end + || {Mod, Opts, Prio} <- NormalizedConfiguredHooks + ], %% according to documentation, if two hooks have the same ID, the latter one get's dropped - PreInitHooks = lists:ukeysort(1, HooksWithId), + PreInitHooks0 = lists:ukeysort(2, HooksWithId), + %% now sort with configured prio the inits (default prio being 0) + PreInitHooks1 = lists:keysort(1, PreInitHooks0), + %% now let's run the inits in order and build the state {States, HooksWithPriority} = lists:foldl( - fun({Id, Mod, Opts}, {StatesAcc, HooksAcc}) -> - {Priority, HookState} = wrapped_init({Mod, Id}, Opts), + fun({_InitPrio, Id, Mod, Opts, ConfiguredPrio}, {StatesAcc, HooksAcc}) -> + {Priority, HookState} = wrapped_init({Mod, Id}, Opts, 
ConfiguredPrio), {StatesAcc#{Id => HookState}, [{Priority, {Mod, Id}} | HooksAcc]} end, {#{}, []}, - PreInitHooks + PreInitHooks1 ), %% sort hooks according to priority @@ -152,8 +164,28 @@ get_hooks_config() -> wrap_part(Part, Fun, State) -> wrap_init_end(Part, Fun, State). -wrap_init_end(Part, Fun, #{hooks := Hooks}) -> - WrappedWithPreAndPost = lists:foldl( +wrap_init_end(Part, Fun, #{hooks := HooksInInstallationOrder}) -> + %% NOTE ON EXECUTION ORDER: + %% + %% As of OTP/26 CT's behaviour according to [https://www.erlang.org/doc/apps/common_test/ct_hooks_chapter#cth-execution-order]: + %% > By default, each CTH installed is executed in the order that they are installed for init calls, + %% > and then reversed for end calls. This is not always desired, so Common Test allows the user to specify + %% > a priority for each hook. + %% + %% Implicit here is: + %% - pre_init and post_init functions are executed in the same order + %% - the hook with the "highest numerical priority" will be the first to run pre_init_per xxxx + %% + %% Starting from OTP/27, CT adds a new option ct_hooks_order option. The behaviour above is called `test`, and a + %% new behaviour called `config` will be added, in which the order of the post_xxxx functions is reversed wrt pre_xxxx + %% (see [https://github.com/erlang/otp/issues/7397] for discussion, and [https://github.com/erlang/otp/pull/7496] + %% for the upcoming ct_hooks_order option). + %% + %% Here we implement only the behaviour that corresponds to the new `config` option. + + %% NB. 
we use a foldr to ensure that the first hook in HooksInInstallationOrder is the innermost, so the first one to
-wrapped_init({Mod, Id}, Opts) -> - case Mod:init(Id, Opts) of - {ok, State} -> {0, State}; - {ok, State, Priority} -> {Priority, State}; - Error -> error({hooks_init_error, Error}) +-spec wrapped_init(hook(), opts(), integer()) -> {integer(), hook_state()}. +wrapped_init({Mod, Id}, Opts, ConfiguredPriority) -> + {InitPriority, InitState} = + case Mod:init(Id, Opts) of + {ok, State} -> {0, State}; + {ok, State, Priority} -> {Priority, State}; + Error -> error({hooks_init_error, Error}) + end, + case ConfiguredPriority of + undefined -> {InitPriority, InitState}; + _ -> {ConfiguredPriority, InitState} end. pre(init_per_suite) -> pre_init_per_suite; diff --git a/prelude/genrule.bzl b/prelude/genrule.bzl index af21b3c7b9..be8ba33c87 100644 --- a/prelude/genrule.bzl +++ b/prelude/genrule.bzl @@ -10,10 +10,11 @@ load("@prelude//:cache_mode.bzl", "CacheModeInfo") load("@prelude//:genrule_local_labels.bzl", "genrule_labels_require_local") load("@prelude//:genrule_toolchain.bzl", "GenruleToolchainInfo") +load("@prelude//:genrule_types.bzl", "GENRULE_MARKER_SUBTARGET_NAME", "GenruleMarkerInfo") load("@prelude//:is_full_meta_repo.bzl", "is_full_meta_repo") load("@prelude//android:build_only_native_code.bzl", "is_build_only_native_code") load("@prelude//os_lookup:defs.bzl", "OsLookup") -load("@prelude//utils:utils.bzl", "flatten", "value_or") +load("@prelude//utils:utils.bzl", "expect", "flatten", "value_or") GENRULE_OUT_DIR = "out" @@ -47,6 +48,14 @@ _BUILD_ROOT_LABELS = {label: True for label in [ # that behavior. 
_NO_SRCS_ENVIRONMENT_LABEL = "no_srcs_environment" +_WINDOWS_ENV_SUBSTITUTIONS = [ + # Replace $OUT and ${OUT} + (regex("\\$(OUT\\b|\\{OUT\\})"), "%OUT%"), + (regex("\\$(SRCDIR\\b|\\{SRCDIR\\})"), "%SRCDIR%"), + (regex("\\$(SRCS\\b|\\{SRCS\\})"), "%SRCS%"), + (regex("\\$(TMP\\b|\\{TMP\\})"), "%TMP%"), +] + def _requires_build_root(ctx: AnalysisContext) -> bool: for label in ctx.attrs.labels: if label in _BUILD_ROOT_LABELS: @@ -68,6 +77,7 @@ _USE_CACHE_MODE = is_full_meta_repo() # Extra attributes required by every genrule based on genrule_impl def genrule_attributes() -> dict[str, Attr]: attributes = { + "always_print_stderr": attrs.bool(default = False), "metadata_env_var": attrs.option(attrs.string(), default = None), "metadata_path": attrs.option(attrs.string(), default = None), "no_outputs_cleanup": attrs.bool(default = False), @@ -167,13 +177,11 @@ def process_genrule( # For backwards compatibility with Buck1. if is_windows: - # Replace $OUT and ${OUT} - cmd.replace_regex("\\$(OUT\\b|\\{OUT\\})", "%OUT%") - cmd.replace_regex("\\$(SRCDIR\\b|\\{SRCDIR\\})", "%SRCDIR%") - cmd.replace_regex("\\$(SRCS\\b|\\{SRCS\\})", "%SRCS%") - cmd.replace_regex("\\$(TMP\\b|\\{TMP\\})", "%TMP%") + for re, sub in _WINDOWS_ENV_SUBSTITUTIONS: + cmd.replace_regex(re, sub) + for extra_env_var in extra_env_vars: - cmd.replace_regex("\\$(%s\\b|\\{%s\\})" % (extra_env_var, extra_env_var), "%%%s%%" % extra_env_var) + cmd.replace_regex(regex("\\$(%s\\b|\\{%s\\})" % (extra_env_var, extra_env_var)), "%%%s%%" % extra_env_var) if _ignore_artifacts(ctx): cmd = cmd.ignore_artifacts() @@ -328,12 +336,21 @@ def process_genrule( category = category, identifier = identifier, no_outputs_cleanup = ctx.attrs.no_outputs_cleanup, + always_print_stderr = ctx.attrs.always_print_stderr, **metadata_args ) + # Use a subtarget to insert a marker, as callsites make assumptions about + # the providers of `process_genrule()`. 
We want to have the marker in + # `DefaultInfo` rather than in `genrule_impl()` because we want to identify + # all classes of genrule-like rules. + sub_targets = {k: [DefaultInfo(default_outputs = v)] for (k, v) in named_outputs.items()} + expect(GENRULE_MARKER_SUBTARGET_NAME not in sub_targets, "Conflicting private `{}` subtarget and named output".format(GENRULE_MARKER_SUBTARGET_NAME)) + sub_targets[GENRULE_MARKER_SUBTARGET_NAME] = [GenruleMarkerInfo()] + providers = [DefaultInfo( default_outputs = default_outputs, - sub_targets = {k: [DefaultInfo(default_outputs = v)] for (k, v) in named_outputs.items()}, + sub_targets = sub_targets, )] # The cxx_genrule also forwards here, and that doesn't have .executable, so use getattr diff --git a/prelude/genrule_types.bzl b/prelude/genrule_types.bzl new file mode 100644 index 0000000000..0793c705d4 --- /dev/null +++ b/prelude/genrule_types.bzl @@ -0,0 +1,12 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +# A provider that's used as a marker for `genrule()`, allows dependents +# to distinguish such outputs +GenruleMarkerInfo = provider(fields = {}) + +GENRULE_MARKER_SUBTARGET_NAME = "genrule_marker" diff --git a/prelude/go/cgo_library.bzl b/prelude/go/cgo_library.bzl index a14eaf027f..762c2f9557 100644 --- a/prelude/go/cgo_library.bzl +++ b/prelude/go/cgo_library.bzl @@ -99,14 +99,33 @@ def _cgo( args = cmd_args() args.add(cmd_args(go_toolchain.cgo, format = "--cgo={}")) + c_compiler = cxx_toolchain.c_compiler_info + # linker = cxx_toolchain.linker_info + + # Passing fbcode-platform ldflags may create S365277, so I would + # comment this change until we really need to do it. 
+ # ldflags = cmd_args( + # linker.linker_flags, + # go_toolchain.external_linker_flags, + # ) + + args.add( + cmd_args(c_compiler.compiler, format = "--env-cc={}"), + # cmd_args(ldflags, format = "--env-ldflags={}"), + ) + # TODO(agallagher): cgo outputs a dir with generated sources, but I'm not # sure how to pass in an output dir *and* enumerate the sources we know will # generated w/o v2 complaining that the output dir conflicts with the nested # artifacts. args.add(cmd_args(go_srcs[0].as_output(), format = "--output={}/..")) - args.add(cmd_args(cxx_toolchain.c_compiler_info.preprocessor, format = "--cpp={}")) args.add(cmd_args(pre_args, format = "--cpp={}")) args.add(cmd_args(pre_include_dirs, format = "--cpp={}")) + args.add(cmd_args(c_compiler.preprocessor_flags, format = "--cpp={}")) + args.add(cmd_args(c_compiler.compiler_flags, format = "--cpp={}")) + + # Passing the same value as go-build, because our -g flags break cgo in some buck modes + args.add(cmd_args(["-g"], format = "--cpp={}")) args.add(srcs) argsfile = ctx.actions.declare_output(paths.join(gen_dir, ".cgo.argsfile")) diff --git a/prelude/go/compile.bzl b/prelude/go/compile.bzl index b7f716aff2..ffb4f25cd7 100644 --- a/prelude/go/compile.bzl +++ b/prelude/go/compile.bzl @@ -37,7 +37,7 @@ def _out_root(shared: bool = False): def get_inherited_compile_pkgs(deps: list[Dependency]) -> dict[str, GoPkg]: return merge_pkgs([d[GoPkgCompileInfo].pkgs for d in deps if GoPkgCompileInfo in d]) -def get_filtered_srcs(ctx: AnalysisContext, srcs: list[Artifact], tests: bool = False) -> cmd_args: +def get_filtered_srcs(ctx: AnalysisContext, srcs: list[Artifact], tests: bool = False, force_disable_cgo: bool = False) -> cmd_args: """ Filter the input sources based on build pragma """ @@ -51,7 +51,7 @@ def get_filtered_srcs(ctx: AnalysisContext, srcs: list[Artifact], tests: bool = "__srcs__", {src.short_path: src for src in srcs}, ) - filter_cmd = get_toolchain_cmd_args(go_toolchain, go_root = True) + filter_cmd = 
get_toolchain_cmd_args(go_toolchain, go_root = True, force_disable_cgo = force_disable_cgo) filter_cmd.add(go_toolchain.filter_srcs[RunInfo]) filter_cmd.add(cmd_args(go_toolchain.go, format = "--go={}")) if tests: diff --git a/prelude/go/go_library.bzl b/prelude/go/go_library.bzl index 373fbd9984..2bc66e990c 100644 --- a/prelude/go/go_library.bzl +++ b/prelude/go/go_library.bzl @@ -33,7 +33,9 @@ def go_library_impl(ctx: AnalysisContext) -> list[Provider]: pkg_name = None if ctx.attrs.srcs: pkg_name = go_attr_pkg_name(ctx) - srcs = get_filtered_srcs(ctx, ctx.attrs.srcs) + + # We need to set CGO_DESABLED for "pure" Go libraries, otherwise CGo files may be selected for compilation. + srcs = get_filtered_srcs(ctx, ctx.attrs.srcs, force_disable_cgo = True) static_pkg = compile( ctx, diff --git a/prelude/go/go_test.bzl b/prelude/go/go_test.bzl index 11984687e4..487caa1285 100644 --- a/prelude/go/go_test.bzl +++ b/prelude/go/go_test.bzl @@ -109,5 +109,7 @@ def go_test_impl(ctx: AnalysisContext) -> list[Provider]: env = ctx.attrs.env, labels = ctx.attrs.labels, contacts = ctx.attrs.contacts, + # FIXME: Consider setting to true + run_from_project_root = False, ), ) + [DefaultInfo(default_output = bin, other_outputs = [gen_main] + runtime_files)] diff --git a/prelude/go/link.bzl b/prelude/go/link.bzl index 8c1f974dcd..175602c124 100644 --- a/prelude/go/link.bzl +++ b/prelude/go/link.bzl @@ -9,13 +9,13 @@ load("@prelude//cxx:cxx_library_utility.bzl", "cxx_inherited_link_info") load( "@prelude//cxx:cxx_link_utility.bzl", "executable_shared_lib_arguments", + "make_link_args", ) load( "@prelude//linking:link_info.bzl", "LinkStyle", "get_link_args_for_strategy", "to_link_strategy", - "unpack_link_args", ) load( "@prelude//linking:shared_libraries.bzl", @@ -138,12 +138,6 @@ def link( runtime_files, extra_link_args = _process_shared_dependencies(ctx, output, deps, link_style) - # Gather external link args from deps. 
- ext_links = get_link_args_for_strategy(ctx, cxx_inherited_link_info(deps), to_link_strategy(link_style)) - ext_link_args = cmd_args(unpack_link_args(ext_links)) - ext_link_args.add(cmd_args(extra_link_args, quote = "shell")) - ext_link_args.add(external_linker_flags) - if link_mode == None: if go_toolchain.cxx_toolchain_for_linking != None: link_mode = "external" @@ -153,12 +147,25 @@ def link( if link_mode == "external": is_win = ctx.attrs._exec_os_type[OsLookup].platform == "windows" + cxx_toolchain = go_toolchain.cxx_toolchain_for_linking + + # Gather external link args from deps. + ext_links = get_link_args_for_strategy(ctx, cxx_inherited_link_info(deps), to_link_strategy(link_style)) + ext_link_args_output = make_link_args( + ctx.actions, + cxx_toolchain, + [ext_links], + ) + ext_link_args = cmd_args() + ext_link_args.add(cmd_args(extra_link_args, quote = "shell")) + ext_link_args.add(external_linker_flags) + ext_link_args.add(ext_link_args_output.link_args) + ext_link_args.hidden(ext_link_args_output.hidden) # Delegate to C++ linker... # TODO: It feels a bit inefficient to generate a wrapper file for every # link. Is there some way to etract the first arg of `RunInfo`? Or maybe # we can generate te platform-specific stuff once and re-use? 
- cxx_toolchain = go_toolchain.cxx_toolchain_for_linking cxx_link_cmd = cmd_args( [ cxx_toolchain.linker_info.linker, diff --git a/prelude/go/packages.bzl b/prelude/go/packages.bzl index 8b2ba15886..630a4f2638 100644 --- a/prelude/go/packages.bzl +++ b/prelude/go/packages.bzl @@ -60,8 +60,13 @@ def stdlib_pkg_artifacts(toolchain: GoToolchainInfo, shared: bool = False) -> di pkgs = {} for pkg in stdlib_pkgs: - _, _, pkg_relpath = pkg.short_path.removeprefix("prebuilt_std/").partition("/") # like net/http.a - name = pkg_relpath.removesuffix(".a") # like net/http + # remove first directory like `pgk` + _, _, temp_path = pkg.short_path.partition("/") + + # remove second directory like `darwin_amd64` + # now we have name like `net/http.a` + _, _, pkg_relpath = temp_path.partition("/") + name = pkg_relpath.removesuffix(".a") # like `net/http` pkgs[name] = pkg return pkgs diff --git a/prelude/go/toolchain.bzl b/prelude/go/toolchain.bzl index 1518c495a6..73d7892428 100644 --- a/prelude/go/toolchain.bzl +++ b/prelude/go/toolchain.bzl @@ -35,7 +35,7 @@ GoToolchainInfo = provider( }, ) -def get_toolchain_cmd_args(toolchain: GoToolchainInfo, go_root = True) -> cmd_args: +def get_toolchain_cmd_args(toolchain: GoToolchainInfo, go_root = True, force_disable_cgo = False) -> cmd_args: cmd = cmd_args("env") if toolchain.env_go_arch != None: cmd.add("GOARCH={}".format(toolchain.env_go_arch)) @@ -46,10 +46,13 @@ def get_toolchain_cmd_args(toolchain: GoToolchainInfo, go_root = True) -> cmd_ar if go_root and toolchain.env_go_root != None: cmd.add(cmd_args(toolchain.env_go_root, format = "GOROOT={}")) - # CGO is enabled by default for native compilation, but we need to set it - # explicitly for cross-builds: - # https://go-review.googlesource.com/c/go/+/12603/2/src/cmd/cgo/doc.go - if toolchain.cgo != None: - cmd.add("CGO_ENABLED=1") + if force_disable_cgo: + cmd.add("CGO_ENABLED=0") + else: + # CGO is enabled by default for native compilation, but we need to set it + # explicitly for 
cross-builds: + # https://go-review.googlesource.com/c/go/+/12603/2/src/cmd/cgo/doc.go + if toolchain.cgo != None: + cmd.add("CGO_ENABLED=1") return cmd diff --git a/prelude/go/tools/cgo_wrapper.py b/prelude/go/tools/cgo_wrapper.py index b7c71b91a5..8c5ba97626 100644 --- a/prelude/go/tools/cgo_wrapper.py +++ b/prelude/go/tools/cgo_wrapper.py @@ -22,13 +22,17 @@ def main(argv): parser.add_argument("--cgo", action="append", default=[]) parser.add_argument("--output", required=True, type=Path) parser.add_argument("--cpp", action="append", default=[]) + parser.add_argument("--env-cc", action="append", default=[]) + parser.add_argument("--env-ldflags", action="append", default=[]) parser.add_argument("srcs", type=Path, nargs="*") args = parser.parse_args(argv[1:]) output = args.output.resolve(strict=False) os.makedirs(output, exist_ok=True) - os.environ["CC"] = args.cpp[0] + env = os.environ.copy() + env["CC"] = " ".join(args.env_cc) + env["CGO_LDFLAGS"] = " ".join(args.env_ldflags) cmd = [] cmd.extend(args.cgo) @@ -40,13 +44,13 @@ def main(argv): # cmd.append(cxxCompilerFlags) with tempfile.NamedTemporaryFile("w", delete=False) as argsfile: - for arg in args.cpp[1:]: + for arg in args.cpp: print(pipes.quote(arg), file=argsfile) argsfile.flush() cmd.append("@" + argsfile.name) cmd.extend(args.srcs) - return subprocess.call(cmd) + return subprocess.call(cmd, env=env) sys.exit(main(sys.argv)) diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index 41ec22ffde..791a42f976 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -117,7 +117,7 @@ HaskellToolchainInfo = provider( "package_name_prefix": provider_field(typing.Any, default = None), "packager": provider_field(typing.Any, default = None), "use_argsfile": provider_field(typing.Any, default = None), - "support_expose_package": provider_field(typing.Any, default = None), + "support_expose_package": provider_field(bool, default = False), "archive_contents": 
provider_field(typing.Any, default = None), "ghci_script_template": provider_field(typing.Any, default = None), "ghci_iserv_template": provider_field(typing.Any, default = None), @@ -480,11 +480,19 @@ PackagesInfo = record( transitive_deps = field(list[HaskellLibraryInfo]), ) +def _package_flag(toolchain: HaskellToolchainInfo) -> str: + if toolchain.support_expose_package: + return "-expose-package" + else: + return "-package" + def get_packages_info( ctx: AnalysisContext, link_style: LinkStyle, specify_pkg_version: bool, enable_profiling: bool) -> PackagesInfo: + haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] + # Collect library dependencies. Note that these don't need to be in a # particular order and we really want to remove duplicates (there # are a *lot* of duplicates). @@ -498,7 +506,8 @@ def get_packages_info( libs[lib.db] = lib # lib.db is a good enough unique key # base is special and gets exposed by default - exposed_package_args = cmd_args(["-expose-package", "base"]) + package_flag = _package_flag(haskell_toolchain) + exposed_package_args = cmd_args([package_flag, "base"]) packagedb_args = cmd_args() @@ -531,7 +540,7 @@ def get_packages_info( if (specify_pkg_version): pkg_name += "-{}".format(lib.version) - exposed_package_args.add("-expose-package", pkg_name) + exposed_package_args.add(package_flag, pkg_name) return PackagesInfo( exposed_package_args = exposed_package_args, @@ -635,7 +644,7 @@ def _compile( stubs.as_output(), ) - # Add -package-db and -expose-package flags for each Haskell + # Add -package-db and -package/-expose-package flags for each Haskell # library dependency. 
packages_info = get_packages_info( ctx, @@ -782,7 +791,7 @@ def _make_package( "-c", _REGISTER_PACKAGE, "", - haskell_toolchain.packager[RunInfo], + haskell_toolchain.packager, db.as_output(), pkg_conf, ]).hidden(hi.values()).hidden(lib.values()), # needs hi, because ghc-pkg checks that the .hi files exist diff --git a/prelude/java/class_to_srcs.bzl b/prelude/java/class_to_srcs.bzl index ad686f9295..d8bc8fa03c 100644 --- a/prelude/java/class_to_srcs.bzl +++ b/prelude/java/class_to_srcs.bzl @@ -5,8 +5,6 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -# @starlark-rust: allow_string_literals_in_type_expr - load( "@prelude//java:java_toolchain.bzl", "JavaTestToolchainInfo", # @unused Used as a type @@ -98,8 +96,9 @@ def maybe_create_class_to_source_map_debuginfo( cmd = cmd_args(java_toolchain.gen_class_to_source_map_debuginfo[RunInfo]) cmd.add("gen") cmd.add("-o", output.as_output()) - for src in srcs: - cmd.add(cmd_args(src)) + inputs_file = actions.write("sourcefiles.txt", srcs) + cmd.add(cmd_args(inputs_file, format = "@{}")) + cmd.hidden(srcs) actions.run(cmd, category = "class_to_srcs_map_debuginfo") return output @@ -108,7 +107,7 @@ def merge_class_to_source_map_from_jar( name: str, java_test_toolchain: JavaTestToolchainInfo, mapping: [Artifact, None] = None, - relative_to: ["cell_root", None] = None, + relative_to: [CellRoot, None] = None, # TODO(nga): I think this meant to be type, not default value. 
deps = [JavaClassToSourceMapInfo.type]) -> Artifact: output = actions.declare_output(name) @@ -142,7 +141,9 @@ def _create_merged_debug_info( JavaClassToSourceMapTset, children = [tset_debuginfo], ) - class_to_source_files = tset.project_as_args("class_to_src_map") - cmd.add(class_to_source_files) + input_files = tset.project_as_args("class_to_src_map") + input_list_file = actions.write("debuginfo_list.txt", input_files) + cmd.add(cmd_args(input_list_file, format = "@{}")) + cmd.hidden(input_files) actions.run(cmd, category = "merged_debuginfo") return output diff --git a/prelude/julia/julia_test.bzl b/prelude/julia/julia_test.bzl index d28e6e9c40..7b0d3e7ca4 100644 --- a/prelude/julia/julia_test.bzl +++ b/prelude/julia/julia_test.bzl @@ -14,6 +14,8 @@ def julia_test_impl(ctx: AnalysisContext) -> list[Provider]: type = "julia", command = [cmd], contacts = ctx.attrs.contacts, + # FIXME: Consider setting to true + run_from_project_root = False, ) return inject_test_run_info(ctx, external_runner_test_info) + [DefaultInfo(default_output = json_info_file)] diff --git a/prelude/linking/lto.bzl b/prelude/linking/lto.bzl index d1279c1808..f275d00590 100644 --- a/prelude/linking/lto.bzl +++ b/prelude/linking/lto.bzl @@ -5,8 +5,6 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -# @starlark-rust: allow_string_literals_in_type_expr - load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo") load("@prelude//cxx:debug.bzl", "SplitDebugMode") diff --git a/prelude/linking/strip.bzl b/prelude/linking/strip.bzl index 4705654555..341ab005dd 100644 --- a/prelude/linking/strip.bzl +++ b/prelude/linking/strip.bzl @@ -35,13 +35,16 @@ def _anon_strip_debug_info_impl(ctx): return [DefaultInfo(), _InterfaceInfo(artifact = output)] # Anonymous wrapper for `extract_symbol_names`. 
-_anon_strip_debug_info = rule( +_anon_strip_debug_info = anon_rule( impl = _anon_strip_debug_info_impl, attrs = { "obj": attrs.source(), "out": attrs.string(), "_cxx_toolchain": attrs.dep(providers = [CxxToolchainInfo]), }, + artifact_promise_mappings = { + "strip_debug_info": lambda p: p[_InterfaceInfo].artifact, + }, ) def strip_debug_info( @@ -50,18 +53,16 @@ def strip_debug_info( obj: Artifact, anonymous: bool = False) -> Artifact: if anonymous: - anon_providers = ctx.actions.anon_target( + strip_debug_info = ctx.actions.anon_target( _anon_strip_debug_info, dict( _cxx_toolchain = ctx.attrs._cxx_toolchain, out = out, obj = obj, ), - ) - return ctx.actions.artifact_promise( - anon_providers.map(lambda p: p[_InterfaceInfo].artifact), - short_path = out, - ) + ).artifact("strip_debug_info") + + return ctx.actions.assert_short_path(strip_debug_info, short_path = out) else: return _strip_debug_info( ctx = ctx, diff --git a/prelude/native.bzl b/prelude/native.bzl index 944994d30e..b9ec92be95 100644 --- a/prelude/native.bzl +++ b/prelude/native.bzl @@ -12,9 +12,7 @@ # **all** interpreted files. 
load("@prelude//android:cpu_filters.bzl", "ALL_CPU_FILTERS", "CPU_FILTER_FOR_DEFAULT_PLATFORM") -load("@prelude//apple:apple_bundle_macro_layer.bzl", "apple_bundle_macro_impl") -load("@prelude//apple:apple_macro_layer.bzl", "apple_binary_macro_impl", "apple_library_macro_impl", "apple_package_macro_impl") -load("@prelude//apple:apple_test_macro_layer.bzl", "apple_test_macro_impl") +load("@prelude//apple:apple_macro_layer.bzl", "apple_binary_macro_impl", "apple_bundle_macro_impl", "apple_library_macro_impl", "apple_package_macro_impl", "apple_test_macro_impl") load("@prelude//apple/swift:swift_toolchain_macro_layer.bzl", "swift_toolchain_macro_impl") load("@prelude//cxx:cxx_toolchain.bzl", "cxx_toolchain_inheriting_target_platform") load("@prelude//cxx:cxx_toolchain_macro_layer.bzl", "cxx_toolchain_macro_impl") diff --git a/prelude/ocaml/ocaml.bzl b/prelude/ocaml/ocaml.bzl index 2fa1bc5153..abd652b83b 100644 --- a/prelude/ocaml/ocaml.bzl +++ b/prelude/ocaml/ocaml.bzl @@ -69,6 +69,7 @@ load( "LinkInfo", "LinkInfos", "LinkStrategy", + "LinkerFlags", "MergedLinkInfo", "ObjectsLinkable", "create_merged_link_info", @@ -77,7 +78,10 @@ load( ) load( "@prelude//linking:linkable_graph.bzl", + "LinkableGraph", "create_linkable_graph", + "create_linkable_graph_node", + "create_linkable_node", ) load( "@prelude//linking:shared_libraries.bzl", @@ -215,6 +219,34 @@ def _mk_ocaml_compiler(ctx: AnalysisContext, env: dict[str, typing.Any], build_m script_args = _mk_script(ctx, script_name, [compiler], env) return script_args +def _get_empty_link_infos() -> dict[LibOutputStyle, LinkInfos]: + infos = {} + for output_style in LibOutputStyle: + infos[output_style] = LinkInfos(default = LinkInfo()) + return infos + +def _get_linkable_graph( + ctx: AnalysisContext, + deps: list[Dependency] = [], + link_infos: dict[LibOutputStyle, LinkInfos] = {}, + linker_flags: [LinkerFlags, None] = None) -> LinkableGraph: + if not deps: + deps = ctx.attrs.deps + return create_linkable_graph( + ctx, + 
node = create_linkable_graph_node( + ctx, + linkable_node = create_linkable_node( + ctx, + default_soname = None, + deps = deps, + link_infos = link_infos if link_infos else _get_empty_link_infos(), + linker_flags = linker_flags, + ), + ), + deps = deps, + ) + # A command initialized with flags common to all compiler commands. def _compiler_cmd(ctx: AnalysisContext, compiler: cmd_args, cc: cmd_args) -> cmd_args: ocaml_toolchain = ctx.attrs._ocaml_toolchain[OCamlToolchainInfo] @@ -685,10 +717,7 @@ def ocaml_library_impl(ctx: AnalysisContext) -> list[Provider]: merge_shared_libraries(ctx.actions, deps = filter_and_map_idx(SharedLibraryInfo, _attr_deps(ctx))), merge_link_group_lib_info(deps = _attr_deps(ctx)), other_outputs_info, - create_linkable_graph( - ctx, - deps = _attr_deps(ctx), - ), + _get_linkable_graph(ctx), ] def ocaml_binary_impl(ctx: AnalysisContext) -> list[Provider]: @@ -822,12 +851,13 @@ def ocaml_object_impl(ctx: AnalysisContext) -> list[Provider]: ocaml_toolchain_runtime_deps = ocaml_toolchain.runtime_dep_link_extras linker_type = cxx_toolchain.linker_info.type link_infos = {} + linker_flags = [cmd_args(f) for f in ocaml_toolchain.runtime_dep_link_flags] for output_style in LibOutputStyle: link_infos[output_style] = LinkInfos(default = LinkInfo( linkables = [ ObjectsLinkable(objects = [obj], linker_type = linker_type), ], - post_flags = [cmd_args(f) for f in ocaml_toolchain.runtime_dep_link_flags], + post_flags = linker_flags, )) obj_link_info = create_merged_link_info( @@ -871,10 +901,7 @@ def ocaml_object_impl(ctx: AnalysisContext) -> list[Provider]: obj_link_info, merge_link_group_lib_info(deps = deps), merge_shared_libraries(ctx.actions, deps = filter_and_map_idx(SharedLibraryInfo, deps)), - create_linkable_graph( - ctx, - deps = deps, - ), + _get_linkable_graph(ctx, deps, link_infos, LinkerFlags(post_flags = linker_flags)), ] # `ocaml_shared` enables one to produce an OCaml "plugin". 
Such native code @@ -945,6 +972,7 @@ def ocaml_shared_impl(ctx: AnalysisContext) -> list[Provider]: return [ DefaultInfo(default_output = binary_nat, sub_targets = sub_targets), + _get_linkable_graph(ctx), ] def prebuilt_ocaml_library_impl(ctx: AnalysisContext) -> list[Provider]: @@ -1010,8 +1038,5 @@ def prebuilt_ocaml_library_impl(ctx: AnalysisContext) -> list[Provider]: create_merged_link_info_for_propagation(ctx, native_infos), merge_link_group_lib_info(deps = ctx.attrs.deps), merge_shared_libraries(ctx.actions, deps = filter_and_map_idx(SharedLibraryInfo, ctx.attrs.deps)), - create_linkable_graph( - ctx, - deps = ctx.attrs.deps, - ), + _get_linkable_graph(ctx), ] diff --git a/prelude/python/cxx_python_extension.bzl b/prelude/python/cxx_python_extension.bzl index 6dbd606872..87099183e6 100644 --- a/prelude/python/cxx_python_extension.bzl +++ b/prelude/python/cxx_python_extension.bzl @@ -129,7 +129,7 @@ def cxx_python_extension_impl(ctx: AnalysisContext) -> list[Provider]: sub_targets = cxx_library_info.sub_targets if extension.pdb: - sub_targets[PDB_SUB_TARGET] = get_pdb_providers(extension.pdb) + sub_targets[PDB_SUB_TARGET] = get_pdb_providers(pdb = extension.pdb, binary = extension.output) cxx_toolchain = get_cxx_toolchain_info(ctx) dumpbin_toolchain_path = cxx_toolchain.dumpbin_toolchain_path diff --git a/prelude/python/make_py_package.bzl b/prelude/python/make_py_package.bzl index e114a0e5bd..cdbdef06af 100644 --- a/prelude/python/make_py_package.bzl +++ b/prelude/python/make_py_package.bzl @@ -151,9 +151,14 @@ def make_py_package( resulting binary. - hidden_resources: extra resources the binary depends on. 
""" + srcs = [] + srcs.extend(pex_modules.manifests.src_manifests()) + + if pex_modules.extensions: + srcs.append(pex_modules.extensions.manifest) preload_libraries = _preload_libraries_args(ctx, shared_libraries) - manifest_module = generate_manifest_module(ctx, python_toolchain, pex_modules.manifests.src_manifests()) + manifest_module = generate_manifest_module(ctx, python_toolchain, srcs) common_modules_args, dep_artifacts = _pex_modules_common_args( ctx, pex_modules, @@ -264,55 +269,58 @@ def _make_py_package_impl( package_style, ) bootstrap_args.add(build_args) - if package_style == PackageStyle("standalone"): + if standalone: bootstrap_args.add(ctx.attrs.standalone_build_args) else: bootstrap_args.add(ctx.attrs.inplace_build_args) - if standalone: + # For inplace builds add local artifacts to outputs so they get properly materialized + runtime_files.extend(dep_artifacts) + runtime_files.append((symlink_tree_path, symlink_tree_path.short_path)) + + # For standalone builds, or builds setting make_py_package we generate args for calling make_par.py + if standalone or make_py_package_cmd != None: # We support building _standalone_ packages locally to e.g. support fbcode's # current style of build info stamping (e.g. T10696178). 
- prefer_local = package_python_locally(ctx, python_toolchain) + prefer_local = standalone and package_python_locally(ctx, python_toolchain) cmd = cmd_args( make_py_package_cmd if make_py_package_cmd != None else python_toolchain.make_py_package_standalone, ) cmd.add(modules_args) cmd.add(bootstrap_args) + if ctx.attrs.runtime_env: + for k, v in ctx.attrs.runtime_env.items(): + cmd.add(cmd_args(["--passthrough", "--runtime_env={}={}".format(k, v)])) cmd.add(cmd_args("--no-sitecustomize")) + identifier_prefix = "standalone{}" if standalone else "inplace{}" ctx.actions.run( cmd, prefer_local = prefer_local, category = "par", - identifier = "standalone{}".format(output_suffix), + identifier = identifier_prefix.format(output_suffix), allow_cache_upload = allow_cache_upload, ) else: - runtime_files.extend(dep_artifacts) - runtime_files.append((symlink_tree_path, symlink_tree_path.short_path)) - if make_py_package_cmd != None: - cmd = cmd_args(make_py_package_cmd) - cmd.add(modules_args) - cmd.add(bootstrap_args) - cmd.add(cmd_args("--no-sitecustomize")) - ctx.actions.run(cmd, category = "par", identifier = "inplace{}".format(output_suffix)) - else: - modules = cmd_args(python_toolchain.make_py_package_modules) - modules.add(modules_args) - ctx.actions.run(modules, category = "par", identifier = "modules{}".format(output_suffix)) - - bootstrap = cmd_args(python_toolchain.make_py_package_inplace) - bootstrap.add(bootstrap_args) - - if ctx.attrs.add_multiprocessing_wrapper and ctx.attrs._exec_os_type[OsLookup].platform == "linux": - # This script will add the preload/library path vars as well as the pythonpath vars to the - # subprocess interpreter so that the spawned process will be able to find the inplace par - # link tree, native libs, and the modules under the link tree. 
- mp_executable = ctx.actions.declare_output("mp_exec_{}.sh".format(name)) - runtime_files.append((mp_executable, mp_executable.short_path)) - bootstrap.add(["--add-multiprocessing-executable", mp_executable.as_output()]) - ctx.actions.run(bootstrap, category = "par", identifier = "bootstrap{}".format(output_suffix)) + modules = cmd_args(python_toolchain.make_py_package_modules) + modules.add(modules_args) + ctx.actions.run(modules, category = "par", identifier = "modules{}".format(output_suffix)) + + bootstrap = cmd_args(python_toolchain.make_py_package_inplace) + bootstrap.add(bootstrap_args) + if ctx.attrs.runtime_env: + for k, v in ctx.attrs.runtime_env.items(): + bootstrap.add(cmd_args(["--runtime_env", "{}={}".format(k, v)])) + + if ctx.attrs.add_multiprocessing_wrapper and ctx.attrs._exec_os_type[OsLookup].platform == "linux": + # This script will add the preload/library path vars as well as the pythonpath vars to the + # subprocess interpreter so that the spawned process will be able to find the inplace par + # link tree, native libs, and the modules under the link tree. 
+ mp_executable = ctx.actions.declare_output("mp_exec_{}.sh".format(name)) + runtime_files.append((mp_executable, mp_executable.short_path)) + bootstrap.add(["--add-multiprocessing-executable", mp_executable.as_output()]) + ctx.actions.run(bootstrap, category = "par", identifier = "bootstrap{}".format(output_suffix)) run_args = [] diff --git a/prelude/python/python_binary.bzl b/prelude/python/python_binary.bzl index 933573c341..42f088634a 100644 --- a/prelude/python/python_binary.bzl +++ b/prelude/python/python_binary.bzl @@ -207,7 +207,7 @@ def _split_debuginfo(ctx, data: dict[str, (typing.Any, Label | bool)]) -> (dict[ transformed = {} for name, (artifact, extra) in data.items(): stripped_binary, debuginfo = strip_debug_with_gnu_debuglink(ctx, name, artifact.unstripped_output) - transformed[name] = LinkedObject(output = stripped_binary, unstripped_output = artifact.unstripped_output), extra + transformed[name] = LinkedObject(output = stripped_binary, unstripped_output = artifact.unstripped_output, dwp = artifact.dwp), extra debuginfo_artifacts[name + ".debuginfo"] = debuginfo return transformed, debuginfo_artifacts @@ -655,7 +655,7 @@ def _convert_python_library_to_executable( extra_manifests = create_manifest_for_source_map(ctx, "extra_manifests", extra_artifacts) shared_libraries = {} - debuginfo_artifacts = None + debuginfo_artifacts = {} # Create the map of native libraries to their artifacts and whether they # need to be preloaded. Note that we merge preload deps into regular deps diff --git a/prelude/python/sourcedb/build.bxl b/prelude/python/sourcedb/build.bxl index 83d5f094ff..349ee1be73 100644 --- a/prelude/python/sourcedb/build.bxl +++ b/prelude/python/sourcedb/build.bxl @@ -22,7 +22,7 @@ def _abort_on_build_failure(target_label: TargetLabel, result: "bxl_build_result # Build sourcedb for the given targets, and return a mapping from target names # to the corresponding sourcedb JSON file location. 
def do_build( - ctx: "bxl_ctx", + ctx: bxl.Context, targets: list[ConfiguredTargetLabel]) -> dict[TargetLabel, Artifact]: # Build sourcedbs of all targets configured_sub_targets = [ diff --git a/prelude/python/sourcedb/classic.bxl b/prelude/python/sourcedb/classic.bxl index 5cb82ac07b..7504ad8129 100644 --- a/prelude/python/sourcedb/classic.bxl +++ b/prelude/python/sourcedb/classic.bxl @@ -5,13 +5,11 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -# @starlark-rust: allow_string_literals_in_type_expr - load(":build.bxl", "do_build") load(":merge.bxl", "do_merge") load(":query.bxl", "do_query") -def _build_entry_point(ctx: "bxl_ctx") -> None: +def _build_entry_point(ctx: bxl.Context) -> None: bxl_actions = ctx.bxl_actions( exec_deps = "prelude//python/tools/sourcedb_merger:legacy_merge", target_platform = "prelude//platforms:default", diff --git a/prelude/python/sourcedb/code_navigation.bxl b/prelude/python/sourcedb/code_navigation.bxl index 0adcb6b33f..fd127ec888 100644 --- a/prelude/python/sourcedb/code_navigation.bxl +++ b/prelude/python/sourcedb/code_navigation.bxl @@ -5,13 +5,11 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
-# @starlark-rust: allow_string_literals_in_type_expr - load(":build.bxl", "do_build") load(":merge.bxl", "do_merge") load(":query.bxl", "do_query") -def _build_entry_point(ctx: "bxl_ctx") -> None: +def _build_entry_point(ctx: bxl.Context) -> None: bxl_actions = ctx.bxl_actions( exec_deps = "prelude//python/tools/sourcedb_merger:merge", target_platform = "prelude//platforms:default", diff --git a/prelude/python/sourcedb/merge.bxl b/prelude/python/sourcedb/merge.bxl index 720017c9c0..3d4024dce4 100644 --- a/prelude/python/sourcedb/merge.bxl +++ b/prelude/python/sourcedb/merge.bxl @@ -8,7 +8,7 @@ # @starlark-rust: allow_string_literals_in_type_expr def do_merge( - ctx: "bxl_ctx", + ctx: bxl.Context, bxl_actions: "bxl_actions", built_sourcedbs: dict[TargetLabel, Artifact], command_category: str) -> "ensured_artifact": diff --git a/prelude/python/sourcedb/query.bxl b/prelude/python/sourcedb/query.bxl index bb225d5990..f33468434e 100644 --- a/prelude/python/sourcedb/query.bxl +++ b/prelude/python/sourcedb/query.bxl @@ -61,7 +61,7 @@ def _get_python_library_manifests_from_analysis_result( return python_source_db_info.manifests def _get_python_library_manifests_from_targets( - ctx: "bxl_ctx", + ctx: bxl.Context, targets: "target_set") -> list[PythonLibraryManifestsTSet]: return filter(None, [ _get_python_library_manifests_from_analysis_result(analysis_result) @@ -69,7 +69,7 @@ def _get_python_library_manifests_from_targets( ]) def get_python_library_manifests_tset_from_targets( - ctx: "bxl_ctx", + ctx: bxl.Context, actions: AnalysisActions, root_targets: "target_set") -> PythonLibraryManifestsTSet: return actions.tset( @@ -78,7 +78,7 @@ def get_python_library_manifests_tset_from_targets( ) def get_python_library_manifests_tset_from_target_patterns( - ctx: "bxl_ctx", + ctx: bxl.Context, query: "cqueryctx", actions: AnalysisActions, target_patterns: typing.Any) -> PythonLibraryManifestsTSet: @@ -86,7 +86,7 @@ def get_python_library_manifests_tset_from_target_patterns( 
return get_python_library_manifests_tset_from_targets(ctx, actions, root_targets) def do_query( - ctx: "bxl_ctx", + ctx: bxl.Context, query: "cqueryctx", actions: AnalysisActions, target_patterns: typing.Any) -> list[ConfiguredTargetLabel]: @@ -102,7 +102,7 @@ def do_query( if manifest.src_types != None ] -def _do_query_entry_point(ctx: "bxl_ctx") -> None: +def _do_query_entry_point(ctx: bxl.Context) -> None: query = ctx.cquery() actions = ctx.bxl_actions().actions targets = do_query(ctx, query, actions, [query.eval(target) for target in ctx.cli_args.target]) diff --git a/prelude/python/tools/make_par/sitecustomize.py b/prelude/python/tools/make_par/sitecustomize.py index 2d91abaf9a..5b29b8225e 100644 --- a/prelude/python/tools/make_par/sitecustomize.py +++ b/prelude/python/tools/make_par/sitecustomize.py @@ -6,6 +6,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +import importlib import multiprocessing.util as mp_util import os import sys @@ -71,6 +72,24 @@ def __clear_env(patch_spawn=True): __patch_spawn(var_names, saved_env) +# pyre-fixme[3]: Return type must be annotated. +def __startup__(): + for name, var in os.environ.items(): + if name.startswith("STARTUP_"): + name, sep, func = var.partition(":") + if sep: + try: + module = importlib.import_module(name) + getattr(module, func)() + except Exception as e: + # TODO: Ignoring errors for now. The way to properly fix this should be to make + # sure we are still at the same binary that configured `STARTUP_` before importing. + print( + "Error running startup function %s:%s: %s" % (name, func, e), + file=sys.stderr, + ) + + # pyre-fixme[3]: Return type must be annotated. 
def __passthrough_exec_module(): # Delegate this module execution to the next module in the path, if any, @@ -86,4 +105,5 @@ def __passthrough_exec_module(): __clear_env() +__startup__() __passthrough_exec_module() diff --git a/prelude/python/tools/make_py_package_inplace.py b/prelude/python/tools/make_py_package_inplace.py index 7942b5678c..e663c969e1 100755 --- a/prelude/python/tools/make_py_package_inplace.py +++ b/prelude/python/tools/make_py_package_inplace.py @@ -133,6 +133,13 @@ def parse_args() -> argparse.Namespace: ), help="The dynamic loader env used to find native library deps", ) + parser.add_argument( + "-e", + "--runtime_env", + action="append", + default=[], + help="environment variables to set before launching the runtime. (e.g. -e FOO=BAR BAZ=QUX)", + ) # Compatibility with existing make_par scripts parser.add_argument("--passthrough", action="append", default=[]) @@ -142,7 +149,11 @@ def parse_args() -> argparse.Namespace: def write_bootstrapper(args: argparse.Namespace) -> None: """Write the .pex bootstrapper script using a template""" - template = args.template_lite if args.use_lite else args.template + template = ( + args.template_lite + if (args.use_lite and not args.runtime_env) + else args.template + ) with open(template, "r", encoding="utf8") as fin: data = fin.read() @@ -206,6 +217,13 @@ def write_bootstrapper(args: argparse.Namespace) -> None: new_data = new_data.replace("", "LD_PRELOAD") new_data = new_data.replace("", ld_preload) + if args.runtime_env: + runtime_env = dict(e.split("=", maxsplit=1) for e in args.runtime_env) + env = f"os.environ.update({runtime_env!r})" + else: + env = "" + new_data = new_data.replace("", env) + args.output.parent.mkdir(parents=True, exist_ok=True) with open(args.output, "w", encoding="utf8") as fout: fout.write(new_data) diff --git a/prelude/python/tools/make_py_package_manifest_module.py b/prelude/python/tools/make_py_package_manifest_module.py index 3c60373f2d..1d8506864c 100755 --- 
a/prelude/python/tools/make_py_package_manifest_module.py +++ b/prelude/python/tools/make_py_package_manifest_module.py @@ -13,7 +13,7 @@ import argparse import json from pathlib import Path -from typing import Optional, Set +from typing import Dict, Optional def parse_args() -> argparse.Namespace: @@ -44,9 +44,9 @@ def parse_args() -> argparse.Namespace: def path_to_module(path: str) -> Optional[str]: - if not path.endswith(".py"): - return None - return path[:-3].replace("/", ".") + for suffix in (".py", ".so", ".pyd"): + if path.endswith(suffix): + return path[: -len(suffix)].replace("/", ".").replace("\\", ".") def main() -> None: @@ -57,16 +57,22 @@ def main() -> None: f"Output path '{output}' already exists, refusing to overwrite." ) - modules: Set[str] = set() + modules: Dict[str, str] = {} for module_manifest_file in args.module_manifests: with open(module_manifest_file) as f: - for pkg_path, *_ in json.load(f): - modules.add(pkg_path) + for pkg_path, _, origin_desc in json.load(f): + module = path_to_module(pkg_path) + if module: + modules[module] = origin_desc # Add artificial __init__.py files like in make_py_package_modules.py for parent in Path(pkg_path).parents: if parent == Path("") or parent == Path("."): continue - modules.add(str(parent / "__init__.py")) + path = str(parent / "__init__.py") + module = path_to_module(path) + if module and module not in modules: + modules[module] = origin_desc + entries = {} if args.manifest_entries: with open(args.manifest_entries) as f: @@ -77,7 +83,9 @@ def main() -> None: ) if "modules" in entries: raise ValueError("'modules' can't be a key in manifest entries") - entries["modules"] = sorted(filter(None, (path_to_module(m) for m in modules))) + sorted_modules = sorted(modules.items()) + entries["modules"] = [m[0] for m in sorted_modules] + entries["origins"] = tuple(m[1] for m in sorted_modules) output.write_text( "\n".join((f"{key} = {repr(value)}" for key, value in entries.items())) ) diff --git 
a/prelude/python/tools/run_inplace.py.in b/prelude/python/tools/run_inplace.py.in index eda4d1cd1a..1c750d64d6 100644 --- a/prelude/python/tools/run_inplace.py.in +++ b/prelude/python/tools/run_inplace.py.in @@ -49,6 +49,8 @@ if native_libs_preload is not None: for l in native_libs_preload.split(":") ) + + # Note: this full block of code will be included as the argument to Python, # and will be the first thing that shows up in the process arguments as displayed # by programs like ps and top. @@ -75,7 +77,7 @@ def __run(): # Replace the working directory with location of the modules directory. assert sys.path[0] == '' - sys.path[0] = {os.path.join(dirpath, modules_dir)!r} + del sys.path[0] import os import runpy @@ -134,6 +136,11 @@ if ( ): args[1:1] = ["-X", "importtime"] +path = os.path.join(dirpath, modules_dir) +if "PYTHONPATH" in os.environ: + path += os.pathsep + os.environ["PYTHONPATH"] +os.environ["PYTHONPATH"] = path + if platform.system() == "Windows": # exec on Windows is not true exec - there is only 'spawn' ('CreateProcess'). # However, creating processes unnecessarily is painful, so we only do the spawn diff --git a/prelude/python/tools/run_inplace_lite.py.in b/prelude/python/tools/run_inplace_lite.py.in index 8529f2cfd8..43e160bf79 100644 --- a/prelude/python/tools/run_inplace_lite.py.in +++ b/prelude/python/tools/run_inplace_lite.py.in @@ -17,6 +17,7 @@ modules_dir = "" def __run(): import sys import os + import site import time os.environ["PAR_LAUNCH_TIMESTAMP"] = str(time.time()) @@ -33,6 +34,8 @@ def __run(): # Replace the working directory with location of the modules directory. 
sys.path[0] = os.path.join(dirpath, modules_dir) + site.execsitecustomize() + from import as run_as_main run_as_main(main_module, main_function) diff --git a/prelude/rules_impl.bzl b/prelude/rules_impl.bzl index 999b1017b0..70af98e91a 100644 --- a/prelude/rules_impl.bzl +++ b/prelude/rules_impl.bzl @@ -304,7 +304,6 @@ def _python_executable_attrs(): "anonymous_link_groups": attrs.bool(default = False), "binary_linker_flags": attrs.list(attrs.arg(anon_target_compatible = True), default = []), "bolt_flags": attrs.list(attrs.arg(), default = []), - "bolt_gdb_index": attrs.option(attrs.source(), default = None), "bolt_profile": attrs.option(attrs.source(), default = None), "compiler_flags": attrs.list(attrs.arg(), default = []), "constraint_overrides": attrs.list(attrs.string(), default = []), @@ -321,11 +320,13 @@ def _python_executable_attrs(): attrs.string(), default = None, doc = """ - Fully qualified name of a Python function that will serve as the main entry point of the binary. - - This should usually be a function defined within one of the - dependencies of this target. This attribute should be preferred over - `main_module` or `main`, and it is an error to specify more than one of these. + Name of a Python function that will serve as the main entry point of + the binary. The name is either a fully qualified name like + `foo.bar.baz` or it starts with a `.` like `.bar.baz`, in which case + it is relative to the package containing the target. This should + usually be a function defined within one of the dependencies of this + target. This attribute should be preferred over `main_module` or + `main`, and it is an error to specify more than one of these. 
""", ), "make_py_package": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), @@ -341,6 +342,7 @@ def _python_executable_attrs(): "package_split_dwarf_dwp": attrs.bool(default = False), "par_style": attrs.option(attrs.string(), default = None), "resources": attrs.named_set(attrs.one_of(attrs.dep(), attrs.source(allow_directory = True)), sorted = True, default = []), + "runtime_env": attrs.option(attrs.dict(key = attrs.string(), value = attrs.string()), default = None), "standalone_build_args": attrs.list(attrs.arg(), default = []), "static_extension_finder": attrs.source(default = "prelude//python/tools:static_extension_finder.py"), "static_extension_utils": attrs.source(default = "prelude//python/tools:static_extension_utils.cpp"), @@ -370,7 +372,6 @@ def _cxx_binary_and_test_attrs(): # top-level binary context. "binary_linker_flags": attrs.list(attrs.arg(anon_target_compatible = True), default = []), "bolt_flags": attrs.list(attrs.arg(), default = []), - "bolt_gdb_index": attrs.option(attrs.source(), default = None), "bolt_profile": attrs.option(attrs.source(), default = None), "enable_distributed_thinlto": attrs.bool(default = False), "link_execution_preference": link_execution_preference_attr(), diff --git a/prelude/rust/build.bzl b/prelude/rust/build.bzl index 788854501a..b5d4346c97 100644 --- a/prelude/rust/build.bzl +++ b/prelude/rust/build.bzl @@ -209,7 +209,9 @@ def generate_rustdoc_test( link_style: LinkStyle, library: RustLinkStyleInfo, params: BuildParams, - default_roots: list[str]) -> cmd_args: + default_roots: list[str]) -> (cmd_args, dict[str, cmd_args]): + exec_is_windows = ctx.attrs._exec_os_type[OsLookup].platform == "windows" + toolchain_info = compile_ctx.toolchain_info resources = create_resource_db( @@ -272,7 +274,7 @@ def generate_rustdoc_test( allow_args = True, ) - if ctx.attrs._exec_os_type[OsLookup].platform == "windows": + if exec_is_windows: runtool = ["--runtool=cmd.exe", "--runtool-arg=/V:OFF", "--runtool-arg=/C"] 
else: runtool = ["--runtool=/usr/bin/env"] @@ -297,13 +299,26 @@ def generate_rustdoc_test( rustdoc_cmd.hidden(compile_ctx.symlinked_srcs, link_args_output.hidden, runtime_files) - return _long_command( + rustdoc_cmd = _long_command( ctx = ctx, exe = toolchain_info.rustdoc, args = rustdoc_cmd, argfile_name = "{}.args".format(common_args.subdir), ) + plain_env, path_env = _process_env(compile_ctx, ctx.attrs.env, exec_is_windows) + rustdoc_env = plain_env | path_env + + # Pass everything in env + doc_env, except ones with value None in doc_env. + for k, v in ctx.attrs.doc_env.items(): + if v == None: + rustdoc_env.pop(k, None) + else: + rustdoc_env[k] = cmd_args(v) + rustdoc_env["RUSTC_BOOTSTRAP"] = cmd_args("1") # for `-Zunstable-options` + + return (rustdoc_cmd, rustdoc_env) + # Generate multiple compile artifacts so that distinct sets of artifacts can be # generated concurrently. def rust_compile_multi( @@ -1117,6 +1132,9 @@ def _long_command( argfile, hidden = ctx.actions.write(argfile_name, args, allow_args = True) return cmd_args(exe, cmd_args(argfile, format = "@{}")).hidden(args, hidden) +_DOUBLE_ESCAPED_NEWLINE_RE = regex("\\\\n") +_ESCAPED_NEWLINE_RE = regex("\\n") + # Separate env settings into "plain" and "with path". Path env vars are often # used in Rust `include!()` and similar directives, which always interpret the # path relative to the source file containing the directive. Since paths in env @@ -1144,7 +1162,7 @@ def _process_env( # Will be unescaped in rustc_action. # Variable may have "\\n" as well. # Example: \\n\n -> \\\n\n -> \\\\n\\n - plain_env[k] = v.replace_regex("\\\\n", "\\\n").replace_regex("\\n", "\\n") + plain_env[k] = v.replace_regex(_DOUBLE_ESCAPED_NEWLINE_RE, "\\\n").replace_regex(_ESCAPED_NEWLINE_RE, "\\n") # If CARGO_MANIFEST_DIR is not already expressed in terms of $(location ...) 
# of some target, then interpret it as a relative path inside of the crate's diff --git a/prelude/rust/rust-analyzer/resolve_deps.bxl b/prelude/rust/rust-analyzer/resolve_deps.bxl index 4e9b88b5e1..d66501e350 100644 --- a/prelude/rust/rust-analyzer/resolve_deps.bxl +++ b/prelude/rust/rust-analyzer/resolve_deps.bxl @@ -64,6 +64,7 @@ def _process_target_config(ctx, target, in_workspace): # Always generate the source folder. Let rust-project resolve whether or not to use it copy["source_folder"] = materialize(ctx, target) copy["label"] = target.label.raw_target() + copy["project_relative_buildfile"] = ctx.fs.project_rel_path(target.buildfile_path) copy["kind"] = target.rule_type copy["in_workspace"] = in_workspace return copy diff --git a/prelude/rust/rust_binary.bzl b/prelude/rust/rust_binary.bzl index fcd2499bcb..28ba7287cf 100644 --- a/prelude/rust/rust_binary.bzl +++ b/prelude/rust/rust_binary.bzl @@ -314,7 +314,7 @@ def _rust_binary_common( ] if pdb: - sub_targets[PDB_SUB_TARGET] = get_pdb_providers(pdb) + sub_targets[PDB_SUB_TARGET] = get_pdb_providers(pdb = pdb, binary = compiled_outputs.link) dupmbin_toolchain = compile_ctx.cxx_toolchain_info.dumpbin_toolchain_path if dupmbin_toolchain: diff --git a/prelude/rust/rust_library.bzl b/prelude/rust/rust_library.bzl index 64b9a50925..f6873aab6b 100644 --- a/prelude/rust/rust_library.bzl +++ b/prelude/rust/rust_library.bzl @@ -307,7 +307,6 @@ def rust_library_impl(ctx: AnalysisContext) -> list[Provider]: providers = [] providers += _default_providers( - ctx = ctx, lang_style_param = lang_style_param, param_artifact = rust_param_artifact, rustdoc = rustdoc, @@ -463,11 +462,10 @@ def _handle_rust_artifact( ) def _default_providers( - ctx: AnalysisContext, lang_style_param: dict[(LinkageLang, LinkStyle), BuildParams], param_artifact: dict[BuildParams, RustLinkStyleInfo], rustdoc: Artifact, - rustdoc_test: [cmd_args, None], + rustdoc_test: [(cmd_args, dict[str, cmd_args]), None], check_artifacts: dict[str, Artifact], 
expand: Artifact, sources: Artifact) -> list[Provider]: @@ -486,7 +484,7 @@ def _default_providers( link_style_info = param_artifact[lang_style_param[(LinkageLang("rust"), link_style)]] nested_sub_targets = {} if link_style_info.pdb: - nested_sub_targets[PDB_SUB_TARGET] = get_pdb_providers(link_style_info.pdb) + nested_sub_targets[PDB_SUB_TARGET] = get_pdb_providers(pdb = link_style_info.pdb, binary = link_style_info.rlib) sub_targets[link_style.value] = [DefaultInfo( default_output = link_style_info.rlib, sub_targets = nested_sub_targets, @@ -495,20 +493,12 @@ def _default_providers( providers = [] if rustdoc_test: - # Pass everything in env + doc_env, except ones with value None in doc_env. - doc_env = dict(ctx.attrs.env) - for k, v in ctx.attrs.doc_env.items(): - if v == None: - doc_env.pop(k, None) - else: - doc_env[k] = v - doc_env["RUSTC_BOOTSTRAP"] = "1" # for `-Zunstable-options` - + (rustdoc_cmd, rustdoc_env) = rustdoc_test rustdoc_test_info = ExternalRunnerTestInfo( type = "rustdoc", - command = [rustdoc_test], + command = [rustdoc_cmd], run_from_project_root = True, - env = doc_env, + env = rustdoc_env, ) # Run doc test as part of `buck2 test :crate` diff --git a/prelude/rust/tools/buildscript_run.py b/prelude/rust/tools/buildscript_run.py index 85f6d98bb6..69a96f3389 100755 --- a/prelude/rust/tools/buildscript_run.py +++ b/prelude/rust/tools/buildscript_run.py @@ -15,7 +15,14 @@ import subprocess import sys from pathlib import Path -from typing import Dict, IO, NamedTuple +from typing import Any, Dict, IO, NamedTuple + + +IS_WINDOWS: bool = os.name == "nt" + + +def eprint(*args: Any, **kwargs: Any) -> None: + print(*args, end="\n", file=sys.stderr, flush=True, **kwargs) def cfg_env(rustc_cfg: Path) -> Dict[str, str]: @@ -111,23 +118,41 @@ def ensure_rustc_available( assert rustc is not None, "RUSTC env is missing" assert target is not None, "TARGET env is missing" - if os.path.dirname(rustc) != "": - rustc = os.path.join(cwd, rustc) - # NOTE: `HOST` is 
optional. host = env.get("HOST") try: - subprocess.check_output([rustc, "--version"]) + # Run through cmd.exe on Windows so if rustc is a batch script + # (like the command_alias trampoline is), it is found relative to + # cwd. + # + # Executing `os.path.join(cwd, rustc)` would also work, but because + # of `../` in the path, it's possible to hit path length limits. + # Resolving it would remove the `..` but then sometimes things + # fail with exit code `3221225725` ("out of stack memory"). + # I suspect it's some infinite loop brought about by the trampoline + # and symlinks. + subprocess.check_output( # noqa: P204 + [rustc, "--version"], + cwd=cwd, + shell=IS_WINDOWS, + ) # A multiplexed sysroot may involve another fetch, # so pass `--target` to check that too. if host != target: - subprocess.check_output([rustc, f"--target={target}", "--version"]) + subprocess.check_output( # noqa: P204 + [rustc, f"--target={target}", "--version"], + cwd=cwd, + shell=IS_WINDOWS, + ) except OSError as ex: - print(f"Failed to run {rustc} because {ex}", file=sys.stderr) + eprint(f"Failed to run {rustc} because {ex}") sys.exit(1) except subprocess.CalledProcessError as ex: - print(f"Failed to run {ex.cmd}: {ex.stderr}", file=sys.stderr) + eprint(f"Command failed with exit code {ex.returncode}") + eprint(f"Command: {ex.cmd}") + if ex.stdout: + eprint(f"Stdout: {ex.stdout}") sys.exit(1) @@ -195,7 +220,7 @@ def main() -> None: # noqa: C901 if cargo_rustc_cfg_match: flags += "--cfg={}\n".format(cargo_rustc_cfg_match.group(1)) else: - print(line) + print(line, end="\n") args.outfile.write(flags) diff --git a/prelude/rust/tools/failure_filter_action.py b/prelude/rust/tools/failure_filter_action.py index 6fd2342ee7..155c93ad5b 100755 --- a/prelude/rust/tools/failure_filter_action.py +++ b/prelude/rust/tools/failure_filter_action.py @@ -16,7 +16,11 @@ import os import shutil import sys -from typing import IO, List, NamedTuple, Optional, Tuple +from typing import Any, IO, List, NamedTuple, 
Optional, Tuple + + +def eprint(*args: Any, **kwargs: Any) -> None: + print(*args, end="\n", file=sys.stderr, flush=True, **kwargs) class Args(NamedTuple): @@ -69,10 +73,7 @@ def main() -> int: # Fall back to real copy if that doesn't work shutil.copy(inp, out) else: - print( - f"Missing required input file {short} ({inp})", - file=sys.stderr, - ) + eprint(f"Missing required input file {short} ({inp})") return build_status["status"] # If all the required files were present, then success regardless of diff --git a/prelude/test/inject_test_run_info.bzl b/prelude/test/inject_test_run_info.bzl index c3bb2ce7f0..811d13a1cc 100644 --- a/prelude/test/inject_test_run_info.bzl +++ b/prelude/test/inject_test_run_info.bzl @@ -5,8 +5,6 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -# @starlark-rust: allow_string_literals_in_type_expr - def inject_test_run_info(ctx: AnalysisContext, test_info: ExternalRunnerTestInfo) -> list[Provider]: # Access this here so we get failures in CI if we forget to inject it # anywhere, regardless of whether an `env` is used. @@ -28,7 +26,7 @@ def inject_test_run_info(ctx: AnalysisContext, test_info: ExternalRunnerTestInfo return [test_info, RunInfo(args = [inject_test_env, env_file, "--", test_info.command])] -def _maybe_relativize_path(test_info: ExternalRunnerTestInfo, cell_root: "cell_root", arg: cmd_args) -> cmd_args: +def _maybe_relativize_path(test_info: ExternalRunnerTestInfo, cell_root: CellRoot, arg: cmd_args) -> cmd_args: if test_info.run_from_project_root: return arg return arg.relative_to(cell_root) diff --git a/prelude/toolchains/demo.bzl b/prelude/toolchains/demo.bzl new file mode 100644 index 0000000000..608602fe6f --- /dev/null +++ b/prelude/toolchains/demo.bzl @@ -0,0 +1,54 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. 
+# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load("@prelude//toolchains:cxx.bzl", "system_cxx_toolchain") +load("@prelude//toolchains:genrule.bzl", "system_genrule_toolchain") +load("@prelude//toolchains:haskell.bzl", "system_haskell_toolchain") +load("@prelude//toolchains:ocaml.bzl", "system_ocaml_toolchain") +load("@prelude//toolchains:python.bzl", "system_python_bootstrap_toolchain", "system_python_toolchain") +load("@prelude//toolchains:rust.bzl", "system_rust_toolchain") + +def system_demo_toolchains(): + """ + All the default toolchains, suitable for a quick demo or early prototyping. + Most real projects should copy/paste the implementation to configure them. + """ + system_cxx_toolchain( + name = "cxx", + visibility = ["PUBLIC"], + ) + + system_genrule_toolchain( + name = "genrule", + visibility = ["PUBLIC"], + ) + + system_haskell_toolchain( + name = "haskell", + visibility = ["PUBLIC"], + ) + + system_ocaml_toolchain( + name = "ocaml", + visibility = ["PUBLIC"], + ) + + system_python_toolchain( + name = "python", + visibility = ["PUBLIC"], + ) + + system_python_bootstrap_toolchain( + name = "python_bootstrap", + visibility = ["PUBLIC"], + ) + + system_rust_toolchain( + name = "rust", + default_edition = "2021", + visibility = ["PUBLIC"], + ) diff --git a/prelude/toolchains/haskell.bzl b/prelude/toolchains/haskell.bzl new file mode 100644 index 0000000000..9d5b02f16d --- /dev/null +++ b/prelude/toolchains/haskell.bzl @@ -0,0 +1,29 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
+ +load("@prelude//haskell:haskell.bzl", "HaskellPlatformInfo", "HaskellToolchainInfo") + +def _system_haskell_toolchain(_ctx: AnalysisContext) -> list[Provider]: + return [ + DefaultInfo(), + HaskellToolchainInfo( + compiler = "ghc", + packager = "ghc-pkg", + linker = "ghc", + compiler_flags = [], + linker_flags = [], + ), + HaskellPlatformInfo( + name = "x86_64", + ), + ] + +system_haskell_toolchain = rule( + impl = _system_haskell_toolchain, + attrs = {}, + is_toolchain_rule = True, +) diff --git a/prelude/toolchains/ocaml.bzl b/prelude/toolchains/ocaml.bzl index 2a96643836..e4eacfa314 100644 --- a/prelude/toolchains/ocaml.bzl +++ b/prelude/toolchains/ocaml.bzl @@ -48,7 +48,10 @@ def _system_ocaml_toolchain_impl(_ctx): ocaml_compiler_flags = [], # e.g. "-opaque" ocamlc_flags = [], ocamlopt_flags = [], - runtime_dep_link_flags = ["-ldl", "-lpthread", "-lzstd"], + # We don't expect /opt/homebrew/lib to exist on Linux but that's not + # a problem. On macOS (aarch64 at least) we expect zstd to live in + # /opt/homebrew/lib. + runtime_dep_link_flags = ["-ldl", "-lpthread", "-L/opt/homebrew/lib", "-lzstd"], runtime_dep_link_extras = [], ), OCamlPlatformInfo(name = "x86_64"), diff --git a/prelude/utils/graph_utils.bzl b/prelude/utils/graph_utils.bzl index 700bc4e590..da9c3e6b7d 100644 --- a/prelude/utils/graph_utils.bzl +++ b/prelude/utils/graph_utils.bzl @@ -7,9 +7,11 @@ load("@prelude//utils:utils.bzl", "expect") -def topo_sort(graph: dict[typing.Any, list[typing.Any]]) -> list[typing.Any]: +def pre_order_traversal( + graph: dict[typing.Any, list[typing.Any]], + node_formatter: typing.Callable = str) -> list[typing.Any]: """ - Topo-sort the given graph. 
+ Perform a pre-order (topologically sorted) traversal of `graph` and return the ordered nodes """ in_degrees = {node: 0 for node in graph} @@ -27,7 +29,7 @@ def topo_sort(graph: dict[typing.Any, list[typing.Any]]) -> list[typing.Any]: for _ in range(len(in_degrees)): if len(queue) == 0: - fail_cycle(graph) + fail_cycle(graph, node_formatter) node = queue.pop() ordered.append(node) @@ -37,12 +39,14 @@ def topo_sort(graph: dict[typing.Any, list[typing.Any]]) -> list[typing.Any]: if in_degrees[dep] == 0: queue.append(dep) - expect(not queue, "finished before processing nodes: {}".format(queue)) + expect(not queue, "finished before processing nodes: {}".format([node_formatter(node) for node in queue])) expect(len(ordered) == len(graph), "missing or duplicate nodes in sort") return ordered -def post_order_traversal(graph: dict[typing.Any, list[typing.Any]]) -> list[typing.Any]: +def post_order_traversal( + graph: dict[typing.Any, list[typing.Any]], + node_formatter: typing.Callable = str) -> list[typing.Any]: """ Performs a post-order traversal of `graph`. 
""" @@ -64,7 +68,7 @@ def post_order_traversal(graph: dict[typing.Any, list[typing.Any]]) -> list[typi for _ in range(len(out_degrees)): if len(queue) == 0: - fail_cycle(graph) + fail_cycle(graph, node_formatter) node = queue.pop() ordered.append(node) @@ -74,18 +78,20 @@ def post_order_traversal(graph: dict[typing.Any, list[typing.Any]]) -> list[typi if out_degrees[dep] == 0: queue.append(dep) - expect(not queue, "finished before processing nodes: {}".format(queue)) + expect(not queue, "finished before processing nodes: {}".format([node_formatter(node) for node in queue])) expect(len(ordered) == len(graph), "missing or duplicate nodes in sort") return ordered -def fail_cycle(graph: dict[typing.Any, list[typing.Any]]) -> typing.Never: +def fail_cycle( + graph: dict[typing.Any, list[typing.Any]], + node_formatter: typing.Callable) -> typing.Never: cycle = find_cycle(graph) if cycle: fail( "cycle in graph detected: {}".format( " -> ".join( - [str(c) for c in cycle], + [node_formatter(c) for c in cycle], ), ), ) @@ -152,13 +158,13 @@ def post_order_traversal_by( ordered.append(node) return ordered -def topo_sort_by( +def pre_order_traversal_by( roots: list[typing.Any], get_nodes_to_traverse_func) -> list[typing.Any]: """ - Returns a topological sorted list of the nodes in the traversal. + Returns a topological sorted list of the nodes from a pre-order traversal. - Note this gives a different order from topo_sort above (to simplify the implementation). + Note this gives a different order from `pre_order_traversal` above (to simplify the implementation). 
""" ordered = post_order_traversal_by(roots, get_nodes_to_traverse_func) return ordered[::-1] @@ -178,7 +184,8 @@ def breadth_first_traversal( def breadth_first_traversal_by( graph_nodes: [dict[typing.Any, typing.Any], None], roots: list[typing.Any], - get_nodes_to_traverse_func) -> list[typing.Any]: + get_nodes_to_traverse_func: typing.Callable, + node_formatter: typing.Callable = str) -> list[typing.Any]: """ Performs a breadth first traversal of `graph_nodes`, beginning with the `roots` and queuing the nodes returned by`get_nodes_to_traverse_func`. @@ -201,7 +208,7 @@ def breadth_first_traversal_by( break node = queue.pop() if graph_nodes: - expect(node in graph_nodes, "Expected node {} in graph nodes", node) + expect(node in graph_nodes, "Expected node {} in graph nodes", node_formatter(node)) nodes_to_visit = get_nodes_to_traverse_func(node) for node in nodes_to_visit: if node not in visited: diff --git a/vendir.lock.yml b/vendir.lock.yml index 36f4250a3d..8a3209855a 100644 --- a/vendir.lock.yml +++ b/vendir.lock.yml @@ -2,11 +2,10 @@ apiVersion: vendir.k14s.io/v1alpha1 directories: - contents: - git: - commitTitle: Augment command execution detail information to test run and test - discovery events... - sha: 67ef396223dc8d86990bc082f8df57c1f862ec02 + commitTitle: 'prelude/rust: Process CARGO_MANIFEST_DIR during rustdoc test...' + sha: 98a6e7b70dc82a62784f80897d72fefdfb54d70f tags: - - "2023-10-01" + - "2023-10-15" path: . path: prelude kind: LockConfig diff --git a/vendir.yml b/vendir.yml index 91270ca434..c42af76305 100644 --- a/vendir.yml +++ b/vendir.yml @@ -8,7 +8,7 @@ directories: git: url: https://github.com/facebook/buck2.git # Need to update to $BUCK2_VERSION when new buck2 gets updated via `nix flake update` - ref: '2023-10-01' + ref: '2023-10-15' includePaths: - prelude/**/* newRootPath: prelude