diff --git a/prelude/.buckconfig b/prelude/.buckconfig
index 222fc0462a5..1fe72c20338 100644
--- a/prelude/.buckconfig
+++ b/prelude/.buckconfig
@@ -5,17 +5,6 @@ prelude = .
# but our custom config format (yuk) doesn't accept inline comments.
# Therefore, we hide the name of the group when not open source.
-[not_repositories] # @oss-enable
-fbcode = ../..
-fbsource = ../../..
-ovr_config = ../../../arvr/tools/build_defs/config
-bazel_skylib = ../../../third-party/bazel-skylib
-fbcode_macros = ../../../tools/build_defs/fbcode_macros
-fbobjc_dylibs = ../../../tools/build_defs/fbobjc_dylibs
-buck = ../../../xplat/build_infra/buck_client
-buck_bazel_skylib = ../../../xplat/build_infra/buck_client/third-party/skylark/bazel-skylib
-toolchains = ../toolchains
-
[repository_aliases]
[not_repository_aliases] # @oss-enable
config = ovr_config
diff --git a/prelude/BUCK b/prelude/BUCK
index f12663a0517..afce0b50f28 100644
--- a/prelude/BUCK
+++ b/prelude/BUCK
@@ -1,14 +1,27 @@
+load("@prelude//utils:source_listing.bzl", "source_listing")
load(":native.bzl", prelude = "native")
oncall("build_infra")
+source_listing(exclude = [
+ # Exclude the PACKAGE file, which uses modifiers, since those are not enabled everywhere yet.
+ "PACKAGE",
+])
+
# Done to avoid triggering a lint rule that replaces glob with an fbcode macro
globby = glob
srcs = globby(
["**"],
# Context: https://fb.workplace.com/groups/buck2users/posts/3121903854732641/
- exclude = ["**/.pyre_configuration.local"],
+ exclude = [
+ "**/.pyre_configuration.local",
+ # Unfortunately, using modifiers requires loading bzl files from outside the prelude,
+ # and that currently breaks isolated tests that attempt to grab a best-effort prelude
+ # from the filegroup below.
+ # TODO: Switch these tests to use the bundled prelude instead.
+ "PACKAGE",
+ ],
)
# Re-export filegroups that are behind package boundary violations for
diff --git a/prelude/abi/BUCK.v2 b/prelude/abi/BUCK.v2
index bb72595e77f..aa06c41471d 100644
--- a/prelude/abi/BUCK.v2
+++ b/prelude/abi/BUCK.v2
@@ -1,3 +1,9 @@
+load("@prelude//utils:source_listing.bzl", "source_listing")
+
+oncall("build_infra")
+
+source_listing()
+
config_setting(
name = "gnu",
constraint_values = [
diff --git a/prelude/abi/constraints/BUCK.v2 b/prelude/abi/constraints/BUCK.v2
index 9b5673523b7..7448fa7c4a7 100644
--- a/prelude/abi/constraints/BUCK.v2
+++ b/prelude/abi/constraints/BUCK.v2
@@ -1,3 +1,9 @@
+load("@prelude//utils:source_listing.bzl", "source_listing")
+
+oncall("build_infra")
+
+source_listing()
+
# Used by open source projects to support `prelude//`
constraint_setting(
diff --git a/prelude/android/aapt2_link.bzl b/prelude/android/aapt2_link.bzl
index a21c592f053..46d4fd8cd5a 100644
--- a/prelude/android/aapt2_link.bzl
+++ b/prelude/android/aapt2_link.bzl
@@ -23,7 +23,7 @@ def get_aapt2_link(
no_resource_removal: bool,
should_keep_raw_values: bool,
package_id_offset: int,
- resource_stable_ids: [Artifact, None],
+ resource_stable_ids: Artifact | None,
preferred_density: [str, None],
filter_locales: bool,
locales: list[str],
@@ -117,8 +117,11 @@ def get_aapt2_link(
aapt2_compile_rules_args_file = ctx.actions.write("{}/aapt2_compile_rules_args_file".format(identifier), cmd_args(aapt2_compile_rules, delimiter = " "))
aapt2_command.add("-R")
- aapt2_command.add(cmd_args(aapt2_compile_rules_args_file, format = "@{}"))
- aapt2_command.hidden(aapt2_compile_rules)
+ aapt2_command.add(cmd_args(
+ aapt2_compile_rules_args_file,
+ format = "@{}",
+ hidden = aapt2_compile_rules,
+ ))
aapt2_command.add(additional_aapt2_params)
diff --git a/prelude/android/android.bzl b/prelude/android/android.bzl
index 868eccd20b7..2569f59ecf1 100644
--- a/prelude/android/android.bzl
+++ b/prelude/android/android.bzl
@@ -57,17 +57,38 @@ FORCE_SINGLE_CPU = read_root_config("buck2", "android_force_single_cpu") in ("Tr
FORCE_SINGLE_DEFAULT_CPU = read_root_config("buck2", "android_force_single_default_cpu") in ("True", "true")
DISABLE_STRIPPING = read_root_config("android", "disable_stripping") in ("True", "true")
+# Format is {"override_name": {"re_cap_key": "re_cap_value"}}; for example:
+# {
+# "dynamic-listing": {
+# "platform": "riot",
+# "pool": "EUREKA_POOL",
+# },
+# "test-execution": {
+# "platform": "riot",
+# "pool": "EUREKA_POOL",
+# },
+# }
+_RE_CAPS = attrs.option(attrs.dict(key = attrs.string(), value = attrs.dict(key = attrs.string(), value = attrs.string())), default = None)
+
+# Format is {"override_name": "re_use_case"}; for example:
+# {
+# "dynamic-listing": "riot",
+# "test-execution": "riot",
+# }
+_RE_USE_CASE = attrs.option(attrs.dict(key = attrs.string(), value = attrs.string()), default = None)
+
extra_attributes = {
"android_aar": {
"abi_generation_mode": attrs.option(attrs.enum(AbiGenerationMode), default = None),
"compress_asset_libraries": attrs.default_only(attrs.bool(default = False)),
"cpu_filters": attrs.list(attrs.enum(TargetCpuType), default = ALL_CPU_FILTERS),
"deps": attrs.list(attrs.split_transition_dep(cfg = cpu_split_transition), default = []),
- "javac": attrs.option(attrs.one_of(attrs.exec_dep(), attrs.source()), default = None),
"min_sdk_version": attrs.option(attrs.int(), default = None),
"native_library_merge_glue": attrs.option(attrs.split_transition_dep(cfg = cpu_split_transition), default = None),
"native_library_merge_linker_args":
attrs.option(attrs.dict(key = attrs.string(), value = attrs.list(attrs.arg())), default = None), - "package_asset_libraries": attrs.default_only(attrs.bool(default = True)), + "package_asset_libraries": attrs.bool(default = True), + "package_resources": attrs.bool(default = True), + "relinker_extra_deps": attrs.list(attrs.split_transition_dep(cfg = cpu_split_transition), default = []), "resources_root": attrs.option(attrs.string(), default = None), "strip_libraries": attrs.default_only(attrs.bool(default = not DISABLE_STRIPPING)), "_android_toolchain": toolchains_common.android(), @@ -83,6 +104,7 @@ extra_attributes = { }, "android_binary": { "aapt_mode": attrs.enum(AaptMode, default = "aapt1"), # Match default in V1 + "application_module_blacklist": attrs.option(attrs.list(attrs.transition_dep(cfg = cpu_transition)), default = None), "application_module_configs": attrs.dict(key = attrs.string(), value = attrs.list(attrs.transition_dep(cfg = cpu_transition)), sorted = False, default = {}), "build_config_values_file": attrs.option(attrs.one_of(attrs.transition_dep(cfg = cpu_transition), attrs.source()), default = None), "constraint_overrides": attrs.list(attrs.string(), default = []), @@ -96,6 +118,7 @@ extra_attributes = { "native_library_merge_code_generator": attrs.option(attrs.exec_dep(), default = None), "native_library_merge_glue": attrs.option(attrs.split_transition_dep(cfg = cpu_split_transition), default = None), "native_library_merge_linker_args": attrs.option(attrs.dict(key = attrs.string(), value = attrs.list(attrs.arg())), default = None), + "relinker_extra_deps": attrs.list(attrs.split_transition_dep(cfg = cpu_split_transition), default = []), "strip_libraries": attrs.bool(default = not DISABLE_STRIPPING), "_android_toolchain": toolchains_common.android(), "_cxx_toolchain": attrs.split_transition_dep(cfg = cpu_split_transition, default = "toolchains//:android-hack"), @@ -115,6 +138,7 @@ extra_attributes = { }, "android_bundle": { "aapt_mode": attrs.enum(AaptMode, default = "aapt1"), # Match default in V1 + "application_module_blacklist": attrs.option(attrs.list(attrs.transition_dep(cfg = cpu_transition)), default = None), "application_module_configs": attrs.dict(key = attrs.string(), value = attrs.list(attrs.transition_dep(cfg = cpu_transition)), sorted = False, default = {}), "build_config_values_file": attrs.option(attrs.one_of(attrs.transition_dep(cfg = cpu_transition), attrs.source()), default = None), "deps": attrs.list(attrs.split_transition_dep(cfg = cpu_split_transition), default = []), @@ -127,6 +151,8 @@ extra_attributes = { "native_library_merge_code_generator": attrs.option(attrs.exec_dep(), default = None), "native_library_merge_glue": attrs.option(attrs.split_transition_dep(cfg = cpu_split_transition), default = None), "native_library_merge_linker_args": attrs.option(attrs.dict(key = attrs.string(), value = attrs.list(attrs.arg())), default = None), + "relinker_extra_deps": attrs.list(attrs.split_transition_dep(cfg = cpu_split_transition), default = []), + "use_derived_apk": attrs.bool(default = False), "_android_toolchain": toolchains_common.android(), "_cxx_toolchain": attrs.split_transition_dep(cfg = cpu_split_transition, default = "toolchains//:android-hack"), "_dex_toolchain": toolchains_common.dex(), @@ -151,24 +177,35 @@ extra_attributes = { "native_library_merge_sequence": attrs.option(attrs.list(attrs.any()), default = None), "_android_toolchain": toolchains_common.android(), "_dex_toolchain": toolchains_common.dex(), + "_exec_os_type": 
buck.exec_os_type_arg(), "_is_building_android_binary": attrs.default_only(attrs.bool(default = True)), "_is_force_single_cpu": attrs.default_only(attrs.bool(default = FORCE_SINGLE_CPU)), "_is_force_single_default_cpu": attrs.default_only(attrs.bool(default = FORCE_SINGLE_DEFAULT_CPU)), "_java_toolchain": toolchains_common.java_for_android(), }, "android_instrumentation_test": { - "instrumentation_test_listener": attrs.option(attrs.source(), default = None), + "extra_instrumentation_args": attrs.option(attrs.dict(key = attrs.string(), value = attrs.arg()), default = None), + "instrumentation_test_listener": attrs.option(attrs.exec_dep(), default = None), "instrumentation_test_listener_class": attrs.option(attrs.string(), default = None), + "is_self_instrumenting": attrs.bool(default = False), + "re_caps": _RE_CAPS, + "re_use_case": _RE_USE_CASE, "_android_toolchain": toolchains_common.android(), "_exec_os_type": buck.exec_os_type_arg(), + "_java_test_toolchain": toolchains_common.java_for_host_test(), "_java_toolchain": toolchains_common.java_for_android(), }, "android_library": { "abi_generation_mode": attrs.option(attrs.enum(AbiGenerationMode), default = None), - "javac": attrs.option(attrs.one_of(attrs.exec_dep(), attrs.source()), default = None), + "android_optional_jars": attrs.option(attrs.list(attrs.source()), default = None), "resources_root": attrs.option(attrs.string(), default = None), + VALIDATION_DEPS_ATTR_NAME: attrs.set(attrs.dep(), sorted = True, default = []), "_android_toolchain": toolchains_common.android(), "_build_only_native_code": attrs.default_only(attrs.bool(default = is_build_only_native_code())), + "_compose_stability_config": attrs.option(attrs.source(), default = select({ + "DEFAULT": None, + "fbsource//tools/build_defs/android/compose:enable-compose-stability-config": "fbsource//tools/build_defs/android/compose:stability_config", + })), "_dex_min_sdk_version": attrs.default_only(attrs.option(attrs.int(), default = dex_min_sdk_version())), "_dex_toolchain": toolchains_common.dex(), "_exec_os_type": buck.exec_os_type_arg(), @@ -198,11 +235,16 @@ extra_attributes = { "res": attrs.option(attrs.one_of(attrs.source(allow_directory = True), attrs.dict(key = attrs.string(), value = attrs.source(), sorted = True)), default = None), "_android_toolchain": toolchains_common.android(), "_build_only_native_code": attrs.default_only(attrs.bool(default = is_build_only_native_code())), + "_java_toolchain": toolchains_common.java_for_android(), }, "apk_genrule": genrule_attributes() | { + "default_outs": attrs.option(attrs.set(attrs.string(), sorted = False), default = None), + "outs": attrs.option(attrs.dict(key = attrs.string(), value = attrs.set(attrs.string(), sorted = False), sorted = False), default = None), "type": attrs.string(default = "apk"), + "use_derived_apk": attrs.bool(default = False), "_android_toolchain": toolchains_common.android(), "_exec_os_type": buck.exec_os_type_arg(), + "_java_toolchain": toolchains_common.java_for_android(), }, "gen_aidl": { "import_paths": attrs.list(attrs.arg(), default = []), @@ -215,7 +257,8 @@ extra_attributes = { }, "robolectric_test": { "abi_generation_mode": attrs.option(attrs.enum(AbiGenerationMode), default = None), - "javac": attrs.option(attrs.one_of(attrs.exec_dep(), attrs.source()), default = None), + "android_optional_jars": attrs.option(attrs.list(attrs.source()), default = None), + "java_agents": attrs.list(attrs.source(), default = []), "resources_root": attrs.option(attrs.string(), default = None), 
"robolectric_runtime_dependencies": attrs.list(attrs.source(), default = []), "test_class_names_file": attrs.option(attrs.source(), default = None), diff --git a/prelude/android/android_aar.bzl b/prelude/android/android_aar.bzl index 0ba16c45162..4ebd31b0940 100644 --- a/prelude/android/android_aar.bzl +++ b/prelude/android/android_aar.bzl @@ -14,8 +14,9 @@ load("@prelude//android:android_toolchain.bzl", "AndroidToolchainInfo") load("@prelude//android:configuration.bzl", "get_deps_by_platform") load("@prelude//android:cpu_filters.bzl", "CPU_FILTER_FOR_DEFAULT_PLATFORM", "CPU_FILTER_FOR_PRIMARY_PLATFORM") load("@prelude//android:util.bzl", "create_enhancement_context") -load("@prelude//java:java_providers.bzl", "get_all_java_packaging_deps", "get_all_java_packaging_deps_from_packaging_infos") +load("@prelude//java:java_providers.bzl", "create_java_packaging_dep", "get_all_java_packaging_deps", "get_all_java_packaging_deps_from_packaging_infos") load("@prelude//java:java_toolchain.bzl", "JavaToolchainInfo") +load("@prelude//utils:argfile.bzl", "argfile") load("@prelude//utils:set.bzl", "set") def android_aar_impl(ctx: AnalysisContext) -> list[Provider]: @@ -37,16 +38,23 @@ def android_aar_impl(ctx: AnalysisContext) -> list[Provider]: get_build_config_java_libraries(ctx, build_config_infos, package_type = "release", exopackage_modes = []), )) + enhancement_ctx = create_enhancement_context(ctx) + android_binary_native_library_info = get_android_binary_native_library_info(enhancement_ctx, android_packageable_info, deps_by_platform) + java_packaging_deps.extend([create_java_packaging_dep( + ctx, + lib.library_output.full_library, + ) for lib in android_binary_native_library_info.generated_java_code]) + jars = [dep.jar for dep in java_packaging_deps if dep.jar] classes_jar = ctx.actions.declare_output("classes.jar") java_toolchain = ctx.attrs._java_toolchain[JavaToolchainInfo] classes_jar_cmd = cmd_args([ java_toolchain.jar_builder, "--entries-to-jar", - ctx.actions.write("classes_jar_entries.txt", jars), + argfile(actions = ctx.actions, name = "classes_jar_entries.txt", args = jars), "--output", classes_jar.as_output(), - ]).hidden(jars) + ]) if ctx.attrs.remove_classes: remove_classes_file = ctx.actions.write("remove_classes.txt", ctx.attrs.remove_classes) @@ -59,6 +67,31 @@ def android_aar_impl(ctx: AnalysisContext) -> list[Provider]: ctx.actions.run(classes_jar_cmd, category = "create_classes_jar") + sub_targets = {} + dependency_sources_jars = [dep.sources_jar for dep in java_packaging_deps if dep.sources_jar] + if dependency_sources_jars: + combined_sources_jar = ctx.actions.declare_output("sources.jar") + java_toolchain = ctx.attrs._java_toolchain[JavaToolchainInfo] + combined_sources_jar_cmd = cmd_args([ + java_toolchain.jar_builder, + "--entries-to-jar", + argfile(actions = ctx.actions, name = "combined_sources_jar_entries.txt", args = dependency_sources_jars), + "--output", + combined_sources_jar.as_output(), + ]) + + if ctx.attrs.remove_classes: + remove_classes_file = ctx.actions.write("sources_remove_classes.txt", ctx.attrs.remove_classes) + combined_sources_jar_cmd.add([ + "--blocklist-patterns", + remove_classes_file, + "--blocklist-patterns-matcher", + "remove_classes_patterns_matcher", + ]) + + ctx.actions.run(combined_sources_jar_cmd, category = "create_sources_jar") + sub_targets["sources.jar"] = [DefaultInfo(default_output = combined_sources_jar)] + entries = [android_manifest, classes_jar] resource_infos = list(android_packageable_info.resource_infos.traverse()) if 
android_packageable_info.resource_infos else [] @@ -66,15 +99,15 @@ def android_aar_impl(ctx: AnalysisContext) -> list[Provider]: android_toolchain = ctx.attrs._android_toolchain[AndroidToolchainInfo] if resource_infos: res_dirs = [resource_info.res for resource_info in resource_infos if resource_info.res] - if res_dirs: + if ctx.attrs.package_resources and res_dirs: merged_resource_sources_dir = ctx.actions.declare_output("merged_resource_sources_dir/res", dir = True) merge_resource_sources_cmd = cmd_args([ android_toolchain.merge_android_resource_sources[RunInfo], "--resource-paths", - ctx.actions.write("resource_paths.txt", res_dirs), + argfile(actions = ctx.actions, name = "resource_paths.txt", args = res_dirs), "--output", merged_resource_sources_dir.as_output(), - ]).hidden(res_dirs) + ]) ctx.actions.run(merge_resource_sources_cmd, category = "merge_android_resource_sources") @@ -88,28 +121,31 @@ def android_aar_impl(ctx: AnalysisContext) -> list[Provider]: if cxx_resources: entries.append(cxx_resources) - enhancement_ctx = create_enhancement_context(ctx) - android_binary_native_library_info = get_android_binary_native_library_info(enhancement_ctx, android_packageable_info, deps_by_platform) - native_libs_file = ctx.actions.write("native_libs_entries.txt", android_binary_native_library_info.native_libs_for_primary_apk) - native_libs_assets_file = ctx.actions.write("native_libs_assets_entries.txt", android_binary_native_library_info.root_module_native_lib_assets) + native_libs_file = argfile(actions = ctx.actions, name = "native_libs_entries.txt", args = android_binary_native_library_info.native_libs_for_primary_apk) + native_libs_assets_file = argfile(actions = ctx.actions, name = "native_libs_assets_entries.txt", args = android_binary_native_library_info.root_module_native_lib_assets) entries_file = ctx.actions.write("entries.txt", entries) aar = ctx.actions.declare_output("{}.aar".format(ctx.label.name)) - create_aar_cmd = cmd_args([ - android_toolchain.aar_builder, - "--output_path", - aar.as_output(), - "--entries_file", - entries_file, - "--on_duplicate_entry", - "fail", - "--native_libs_file", - native_libs_file, - "--native_libs_assets_file", - native_libs_assets_file, - ]).hidden(entries, android_binary_native_library_info.native_libs_for_primary_apk, android_binary_native_library_info.root_module_native_lib_assets) + create_aar_cmd = cmd_args( + [ + android_toolchain.aar_builder, + "--output_path", + aar.as_output(), + "--entries_file", + entries_file, + "--on_duplicate_entry", + "fail", + "--native_libs_file", + native_libs_file, + "--native_libs_assets_file", + native_libs_assets_file, + ], + hidden = [ + entries, + ], + ) ctx.actions.run(create_aar_cmd, category = "create_aar") - return [DefaultInfo(default_outputs = [aar], sub_targets = enhancement_ctx.get_sub_targets())] + return [DefaultInfo(default_outputs = [aar], sub_targets = enhancement_ctx.get_sub_targets() | sub_targets)] diff --git a/prelude/android/android_apk.bzl b/prelude/android/android_apk.bzl index 94ab2a5c44a..66bf15c405f 100644 --- a/prelude/android/android_apk.bzl +++ b/prelude/android/android_apk.bzl @@ -9,9 +9,12 @@ load("@prelude//:validation_deps.bzl", "get_validation_deps_outputs") load("@prelude//android:android_binary.bzl", "get_binary_info") load("@prelude//android:android_providers.bzl", "AndroidApkInfo", "AndroidApkUnderTestInfo", "AndroidBinaryNativeLibsInfo", "AndroidBinaryResourcesInfo", "DexFilesInfo", "ExopackageInfo") load("@prelude//android:android_toolchain.bzl", 
"AndroidToolchainInfo") +load("@prelude//java:class_to_srcs.bzl", "merge_class_to_source_map_from_jar") load("@prelude//java:java_providers.bzl", "KeystoreInfo") +load("@prelude//java:java_toolchain.bzl", "JavaToolchainInfo") load("@prelude//java/utils:java_more_utils.bzl", "get_path_separator_for_exec_os") load("@prelude//java/utils:java_utils.bzl", "get_class_to_source_map_info") +load("@prelude//utils:argfile.bzl", "argfile") load("@prelude//utils:set.bzl", "set") def android_apk_impl(ctx: AnalysisContext) -> list[Provider]: @@ -33,6 +36,7 @@ def android_apk_impl(ctx: AnalysisContext) -> list[Provider]: resources_info = resources_info, compress_resources_dot_arsc = ctx.attrs.resource_compression == "enabled" or ctx.attrs.resource_compression == "enabled_with_strings_as_assets", validation_deps_outputs = get_validation_deps_outputs(ctx), + packaging_options = ctx.attrs.packaging_options, ) if dex_files_info.secondary_dex_exopackage_info or native_library_info.exopackage_info or resources_info.exopackage_info: @@ -41,25 +45,41 @@ def android_apk_impl(ctx: AnalysisContext) -> list[Provider]: native_library_info = native_library_info.exopackage_info, resources_info = resources_info.exopackage_info, ) - exopackage_outputs = _get_exopackage_outputs(exopackage_info) - default_output = ctx.actions.write("exopackage_apk_warning", "exopackage apks should not be used externally, try buck install or building with exopackage disabled\n") + default_output = ctx.actions.write( + "{}_exopackage_apk_warning".format(ctx.label.name), + "exopackage apks should not be used externally, try buck install or building with exopackage disabled\n", + ) sub_targets["exo_apk"] = [DefaultInfo(default_output = output_apk)] # Used by tests else: exopackage_info = None - exopackage_outputs = [] default_output = output_apk - class_to_srcs, class_to_srcs_subtargets = get_class_to_source_map_info( + class_to_srcs, _, class_to_srcs_subtargets = get_class_to_source_map_info( ctx, outputs = None, deps = android_binary_info.deps_by_platform[android_binary_info.primary_platform], ) + transitive_class_to_src_map = merge_class_to_source_map_from_jar( + actions = ctx.actions, + name = ctx.label.name + ".transitive_class_to_src.json", + java_toolchain = ctx.attrs._java_toolchain[JavaToolchainInfo], + relative_to = None, + deps = [class_to_srcs], + ) + sub_targets["transitive_class_to_src_map"] = [DefaultInfo(default_output = transitive_class_to_src_map)] # We can only be sure that an APK has native libs if it has any shared libraries. Prebuilt native libraries dirs can exist but be empty. 
definitely_has_native_libs = bool(native_library_info.shared_libraries) + install_info = get_install_info(ctx, output_apk = output_apk, manifest = resources_info.manifest, exopackage_info = exopackage_info, definitely_has_native_libs = definitely_has_native_libs) + return [ - AndroidApkInfo(apk = output_apk, manifest = resources_info.manifest, materialized_artifacts = android_binary_info.materialized_artifacts), + AndroidApkInfo( + apk = output_apk, + manifest = resources_info.manifest, + materialized_artifacts = android_binary_info.materialized_artifacts, + unstripped_shared_libraries = native_library_info.unstripped_shared_libraries, + ), AndroidApkUnderTestInfo( java_packaging_deps = set([dep.label.raw_target() for dep in java_packaging_deps]), keystore = keystore, @@ -71,8 +91,8 @@ def android_apk_impl(ctx: AnalysisContext) -> list[Provider]: r_dot_java_packages = set([info.specified_r_dot_java_package for info in resources_info.unfiltered_resource_infos if info.specified_r_dot_java_package]), shared_libraries = set(native_library_info.shared_libraries), ), - DefaultInfo(default_output = default_output, other_outputs = exopackage_outputs + android_binary_info.materialized_artifacts, sub_targets = sub_targets | class_to_srcs_subtargets), - get_install_info(ctx, output_apk = output_apk, manifest = resources_info.manifest, exopackage_info = exopackage_info, definitely_has_native_libs = definitely_has_native_libs), + DefaultInfo(default_output = default_output, other_outputs = install_info.files.values() + android_binary_info.materialized_artifacts, sub_targets = sub_targets | class_to_srcs_subtargets), + install_info, TemplatePlaceholderInfo( keyed_variables = { "classpath": cmd_args([dep.jar for dep in java_packaging_deps if dep.jar], delimiter = get_path_separator_for_exec_os(ctx)), @@ -91,10 +111,11 @@ def build_apk( native_library_info: AndroidBinaryNativeLibsInfo, resources_info: AndroidBinaryResourcesInfo, compress_resources_dot_arsc: bool = False, - validation_deps_outputs: [list[Artifact], None] = None) -> Artifact: + validation_deps_outputs: [list[Artifact], None] = None, + packaging_options: dict | None = None) -> Artifact: output_apk = actions.declare_output("{}.apk".format(label.name)) - apk_builder_args = cmd_args([ + apk_builder_args = cmd_args( android_toolchain.apk_builder[RunInfo], "--output-apk", output_apk.as_output(), @@ -108,17 +129,12 @@ def build_apk( keystore.properties, "--zipalign_tool", android_toolchain.zipalign[RunInfo], - ]) - - # The outputs of validation_deps need to be added as hidden arguments - # to an action for the validation_deps targets to be built and enforced. - if validation_deps_outputs: - apk_builder_args.hidden(validation_deps_outputs) - - if android_toolchain.package_meta_inf_version_files: - apk_builder_args.add("--package-meta-inf-version-files") - if compress_resources_dot_arsc: - apk_builder_args.add("--compress-resources-dot-arsc") + "--package-meta-inf-version-files" if android_toolchain.package_meta_inf_version_files else [], + "--compress-resources-dot-arsc" if compress_resources_dot_arsc else [], + # The outputs of validation_deps need to be added as hidden arguments + # to an action for the validation_deps targets to be built and enforced. 
+ hidden = validation_deps_outputs or [], + ) asset_directories = ( native_library_info.root_module_native_lib_assets + @@ -127,15 +143,11 @@ def build_apk( dex_files_info.non_root_module_secondary_dex_dirs + resources_info.module_manifests ) - asset_directories_file = actions.write("asset_directories.txt", asset_directories) - apk_builder_args.hidden(asset_directories) - native_library_directories = actions.write("native_library_directories", native_library_info.native_libs_for_primary_apk) - apk_builder_args.hidden(native_library_info.native_libs_for_primary_apk) + asset_directories_file = argfile(actions = actions, name = "asset_directories.txt", args = asset_directories) + native_library_directories = argfile(actions = actions, name = "native_library_directories", args = native_library_info.native_libs_for_primary_apk) all_zip_files = [resources_info.packaged_string_assets] if resources_info.packaged_string_assets else [] - zip_files = actions.write("zip_files", all_zip_files) - apk_builder_args.hidden(all_zip_files) - jar_files_that_may_contain_resources = actions.write("jar_files_that_may_contain_resources", resources_info.jar_files_that_may_contain_resources) - apk_builder_args.hidden(resources_info.jar_files_that_may_contain_resources) + zip_files = argfile(actions = actions, name = "zip_files", args = all_zip_files) + jar_files_that_may_contain_resources = argfile(actions = actions, name = "jar_files_that_may_contain_resources", args = resources_info.jar_files_that_may_contain_resources) apk_builder_args.add([ "--asset-directories-list", @@ -148,6 +160,13 @@ def build_apk( jar_files_that_may_contain_resources, ]) + if packaging_options: + for key, value in packaging_options.items(): + if key != "excluded_resources": + fail("Only 'excluded_resources' is supported in packaging_options right now!") + else: + apk_builder_args.add("--excluded-resources", actions.write("excluded_resources.txt", value)) + actions.run(apk_builder_args, category = "apk_build") return output_apk @@ -157,11 +176,12 @@ def get_install_info( output_apk: Artifact, manifest: Artifact, exopackage_info: [ExopackageInfo, None], - definitely_has_native_libs: bool = True) -> InstallInfo: + definitely_has_native_libs: bool = True, + apex_mode: bool = False) -> InstallInfo: files = { ctx.attrs.name: output_apk, "manifest": manifest, - "options": generate_install_config(ctx), + "options": generate_install_config(ctx, apex_mode), } if exopackage_info: @@ -200,45 +220,23 @@ def get_install_info( files = files, ) -def _get_exopackage_outputs(exopackage_info: ExopackageInfo) -> list[Artifact]: - outputs = [] - secondary_dex_exopackage_info = exopackage_info.secondary_dex_info - if secondary_dex_exopackage_info: - outputs.append(secondary_dex_exopackage_info.metadata) - outputs.append(secondary_dex_exopackage_info.directory) - - native_library_exopackage_info = exopackage_info.native_library_info - if native_library_exopackage_info: - outputs.append(native_library_exopackage_info.metadata) - outputs.append(native_library_exopackage_info.directory) - - resources_info = exopackage_info.resources_info - if resources_info: - outputs.append(resources_info.res) - outputs.append(resources_info.res_hash) - - if resources_info.assets: - outputs.append(resources_info.assets) - outputs.append(resources_info.assets_hash) - - return outputs - -def generate_install_config(ctx: AnalysisContext) -> Artifact: - data = get_install_config() +def generate_install_config(ctx: AnalysisContext, apex_mode: bool) -> Artifact: + data = 
get_install_config(apex_mode) return ctx.actions.write_json("install_android_options.json", data) -def get_install_config() -> dict[str, typing.Any]: +def get_install_config(apex_mode: bool) -> dict[str, typing.Any]: # TODO: read from toolchains install_config = { "adb_restart_on_failure": read_root_config("adb", "adb_restart_on_failure", "true"), "agent_port_base": read_root_config("adb", "agent_port_base", "2828"), "always_use_java_agent": read_root_config("adb", "always_use_java_agent", "false"), + "apex_mode": apex_mode, "is_zstd_compression_enabled": read_root_config("adb", "is_zstd_compression_enabled", "false"), "max_retries": read_root_config("adb", "retries", "5"), "multi_install_mode": read_root_config("adb", "multi_install_mode", "false"), "retry_delay_millis": read_root_config("adb", "retry_delay_millis", "500"), "skip_install_metadata": read_root_config("adb", "skip_install_metadata", "false"), - "staged_install_mode": read_root_config("adb", "staged_install_mode", "false"), + "staged_install_mode": read_root_config("adb", "staged_install_mode", None), } adb_executable = read_root_config("android", "adb", None) diff --git a/prelude/android/android_binary.bzl b/prelude/android/android_binary.bzl index f83d25a55c1..f1f2908f3bd 100644 --- a/prelude/android/android_binary.bzl +++ b/prelude/android/android_binary.bzl @@ -10,6 +10,8 @@ load("@prelude//android:android_binary_resources_rules.bzl", "get_android_binary load("@prelude//android:android_build_config.bzl", "generate_android_build_config", "get_build_config_fields") load( "@prelude//android:android_providers.bzl", + "AndroidBinaryNativeLibsInfo", # @unused Used as type + "AndroidBinaryResourcesInfo", # @unused Used as type "AndroidBuildConfigInfo", # @unused Used as type "BuildConfigField", "DexFilesInfo", @@ -24,17 +26,24 @@ load("@prelude//android:preprocess_java_classes.bzl", "get_preprocessed_java_cla load("@prelude//android:proguard.bzl", "get_proguard_output") load("@prelude//android:util.bzl", "create_enhancement_context") load("@prelude//android:voltron.bzl", "get_target_to_module_mapping") -load("@prelude//java:java_providers.bzl", "JavaPackagingInfo", "create_java_packaging_dep", "get_all_java_packaging_deps", "get_all_java_packaging_deps_from_packaging_infos") +load( + "@prelude//java:java_providers.bzl", + "JavaPackagingDep", # @unused Used as type + "JavaPackagingInfo", + "create_java_packaging_dep", + "get_all_java_packaging_deps", + "get_all_java_packaging_deps_from_packaging_infos", +) load("@prelude//utils:expect.bzl", "expect") AndroidBinaryInfo = record( sub_targets = dict, - java_packaging_deps = list["JavaPackagingDep"], + java_packaging_deps = list[JavaPackagingDep], deps_by_platform = dict, primary_platform = str, dex_files_info = DexFilesInfo, - native_library_info = "AndroidBinaryNativeLibsInfo", - resources_info = "AndroidBinaryResourcesInfo", + native_library_info = AndroidBinaryNativeLibsInfo, + resources_info = AndroidBinaryResourcesInfo, materialized_artifacts = list[Artifact], ) @@ -85,6 +94,7 @@ def get_binary_info(ctx: AnalysisContext, use_proto_format: bool) -> AndroidBina generate_strings_and_ids_separately = should_pre_dex, aapt2_preferred_density = ctx.attrs.aapt2_preferred_density, ) + sub_targets["manifest"] = [DefaultInfo(default_output = resources_info.manifest)] android_toolchain = ctx.attrs._android_toolchain[AndroidToolchainInfo] compiled_r_dot_java_deps = [ create_java_packaging_dep( @@ -126,7 +136,7 @@ def get_binary_info(ctx: AnalysisContext, use_proto_format: bool) -> 
AndroidBina else: jars_to_owners = {packaging_dep.jar: packaging_dep.jar.owner.raw_target() for packaging_dep in dex_java_packaging_deps} if ctx.attrs.preprocess_java_classes_bash: - jars_to_owners, materialized_artifacts_dir = get_preprocessed_java_classes(ctx, jars_to_owners) + jars_to_owners, materialized_artifacts_dir = get_preprocessed_java_classes(enhancement_ctx, jars_to_owners) if materialized_artifacts_dir: materialized_artifacts.append(materialized_artifacts_dir) if has_proguard_config: @@ -137,6 +147,7 @@ def get_binary_info(ctx: AnalysisContext, use_proto_format: bool) -> AndroidBina resources_info.proguard_config_file, [no_dx[DefaultInfo].default_outputs[0] for no_dx in ctx.attrs.no_dx if len(no_dx[DefaultInfo].default_outputs) == 1], ) + materialized_artifacts.extend(proguard_output.proguard_artifacts) jars_to_owners = proguard_output.jars_to_owners dir_srcs = {artifact.basename: artifact for artifact in proguard_output.proguard_artifacts} for i, hidden_artifact in enumerate(proguard_output.proguard_hidden_artifacts): diff --git a/prelude/android/android_binary_native_library_rules.bzl b/prelude/android/android_binary_native_library_rules.bzl index aaa384b0590..67b1ca6c3cb 100644 --- a/prelude/android/android_binary_native_library_rules.bzl +++ b/prelude/android/android_binary_native_library_rules.bzl @@ -25,7 +25,7 @@ load( load("@prelude//cxx:link_types.bzl", "link_options") load( "@prelude//cxx:symbols.bzl", - "extract_global_syms", + "extract_defined_syms", "extract_undefined_syms", ) load("@prelude//java:java_library.bzl", "compile_to_jar") # @unused @@ -37,7 +37,6 @@ load( "LinkArgs", "LinkInfo", "LinkOrdering", - "Linkage", "SharedLibLinkable", "get_lib_output_style", "set_link_info_link_whole", @@ -55,13 +54,17 @@ load( "@prelude//linking:shared_libraries.bzl", "SharedLibrary", # @unused Used as a type "SharedLibraryInfo", # @unused Used as a type + "create_shlib", "get_strip_non_global_flags", "merge_shared_libraries", "traverse_shared_library_info", + "with_unique_str_sonames", ) load("@prelude//linking:strip.bzl", "strip_object") +load("@prelude//linking:types.bzl", "Linkage") +load("@prelude//utils:argfile.bzl", "argfile") load("@prelude//utils:expect.bzl", "expect") -load("@prelude//utils:graph_utils.bzl", "breadth_first_traversal_by", "post_order_traversal", "pre_order_traversal") +load("@prelude//utils:graph_utils.bzl", "GraphTraversal", "depth_first_traversal_by", "post_order_traversal", "pre_order_traversal") load("@prelude//utils:set.bzl", "set", "set_type") # @unused Used as a type load("@prelude//utils:utils.bzl", "dedupe_by_value") @@ -80,22 +83,19 @@ load("@prelude//utils:utils.bzl", "dedupe_by_value") # 2. As assets. These are passed to the APK build as assets, and are stored at # `assets/lib//library.so` In the root module, we only package a native library as an # asset if it is eligible to be an asset (e.g. `can_be_asset` on a `cxx_library`), and -# `package_asset_libraries` is set to True for the APK. We will additionally compress all the -# assets into a single `assets/lib/libs.xz` (or `assets/libs/libs.zstd` for `zstd` compression) -# if `compress_asset_libraries` is set to True for the APK. Regardless of whether we compress -# the assets or not, we create a metadata file at `assets/libs/metadata.txt` that has a single -# line entry for each packaged asset consisting of ' '. +# `package_asset_libraries` is set to True for the APK. 
We create a metadata file at +# `assets/libs/metadata.txt` that has a single line entry for each packaged asset consisting of +# ' '. # # Any native library that is not part of the root module (i.e. it is part of some other Voltron -# module) is automatically packaged as an asset, and the assets for each module are compressed -# to a single `assets//libs.xz` only if `compress_asset_libraries` is set to True. -# Similarly, the metadata for each module is stored at `assets//libs.txt`. +# module) is automatically packaged as an asset. Similarly, the metadata for each module is stored +# at `assets//libs.txt`. def get_android_binary_native_library_info( enhance_ctx: EnhancementContext, android_packageable_info: AndroidPackageableInfo, deps_by_platform: dict[str, list[Dependency]], - apk_module_graph_file: [Artifact, None] = None, + apk_module_graph_file: Artifact | None = None, prebuilt_native_library_dirs_to_exclude: [set_type, None] = None, shared_libraries_to_exclude: [set_type, None] = None) -> AndroidBinaryNativeLibsInfo: ctx = enhance_ctx.ctx @@ -128,6 +128,7 @@ def get_android_binary_native_library_info( root_module_native_lib_assets = [], non_root_module_native_lib_assets = [], generated_java_code = [], + unstripped_shared_libraries = None, ) native_libs = ctx.actions.declare_output("native_libs_symlink") @@ -136,7 +137,6 @@ def get_android_binary_native_library_info( native_lib_assets_for_primary_apk = ctx.actions.declare_output("native_lib_assets_for_primary_apk_symlink") stripped_native_linkable_assets_for_primary_apk = ctx.actions.declare_output("stripped_native_linkable_assets_for_primary_apk_symlink") root_module_metadata_assets = ctx.actions.declare_output("root_module_metadata_assets_symlink") - root_module_compressed_lib_assets = ctx.actions.declare_output("root_module_compressed_lib_assets_symlink") non_root_module_metadata_assets = ctx.actions.declare_output("non_root_module_metadata_assets_symlink") non_root_module_lib_assets = ctx.actions.declare_output("non_root_module_lib_assets_symlink") @@ -154,7 +154,6 @@ def get_android_binary_native_library_info( unstripped_native_libraries_files, stripped_native_linkable_assets_for_primary_apk, root_module_metadata_assets, - root_module_compressed_lib_assets, non_root_module_metadata_assets, non_root_module_lib_assets, ] @@ -231,6 +230,7 @@ def get_android_binary_native_library_info( abi = mergemap_gencode_jar, abi_as_dir = None, required_for_source_only_abi = False, + abi_jar_snapshot = None, ) generated_java_code.append( JavaLibraryInfo( @@ -242,8 +242,15 @@ def get_android_binary_native_library_info( def dynamic_native_libs_info(ctx: AnalysisContext, artifacts, outputs): get_module_from_target = all_targets_in_root_module + get_module_tdeps = all_targets_in_root_module + get_calculated_module_deps = all_targets_in_root_module + get_deps_debug_data = None if apk_module_graph_file: - get_module_from_target = get_apk_module_graph_info(ctx, apk_module_graph_file, artifacts).target_to_module_mapping_function + apk_module_graph = get_apk_module_graph_info(ctx, apk_module_graph_file, artifacts) + get_module_from_target = apk_module_graph.target_to_module_mapping_function + get_module_tdeps = apk_module_graph.transitive_module_deps_function + get_calculated_module_deps = apk_module_graph.calculated_deps_function + get_deps_debug_data = apk_module_graph.get_deps_debug_data split_groups = None merged_shared_lib_targets_by_platform = {} # dict[str, dict[Label, str]] @@ -358,6 +365,23 @@ def get_android_binary_native_library_info( for lib, 
platform in unstripped_libs.items()
})
+ if ctx.attrs._android_toolchain[AndroidToolchainInfo].cross_module_native_deps_check:
+ # note: can only detect these if linkable_nodes_by_platform is created, i.e. if using relinker or merging
+ cross_module_link_errors = []
+ for linkable_nodes in linkable_nodes_by_platform.values():
+ for target, node in linkable_nodes.items():
+ node_target = str(target.raw_target())
+ node_module = get_module_from_target(node_target)
+ for dep in node.deps:
+ dep_target = str(dep.raw_target())
+ dep_module = get_module_from_target(dep_target)
+ if not is_root_module(dep_module) and node_module != dep_module and dep_module not in get_module_tdeps(node_module) and dep_module not in get_calculated_module_deps(node_module):
+ cross_module_link_errors.append("{} (module: {}) -> {} (module: {}) ".format(node_target, node_module, dep_target, dep_module))
+
+ if cross_module_link_errors:
+ cross_module_link_errors.append(get_deps_debug_data())
+ fail("Native libraries in modules should only depend on libraries in the same module or the root. Remove these deps:\n" + "\n".join(cross_module_link_errors))
+
dynamic_info = _get_native_libs_and_assets(
ctx,
get_module_from_target,
@@ -374,11 +398,10 @@ def get_android_binary_native_library_info(
ctx.actions.symlink_file(outputs[native_lib_assets_for_primary_apk], dynamic_info.native_lib_assets_for_primary_apk if dynamic_info.native_lib_assets_for_primary_apk else ctx.actions.symlinked_dir("empty_native_lib_assets", {}))
ctx.actions.symlink_file(outputs[stripped_native_linkable_assets_for_primary_apk], dynamic_info.stripped_native_linkable_assets_for_primary_apk if dynamic_info.stripped_native_linkable_assets_for_primary_apk else ctx.actions.symlinked_dir("empty_stripped_native_linkable_assets", {}))
ctx.actions.symlink_file(outputs[root_module_metadata_assets], dynamic_info.root_module_metadata_assets)
- ctx.actions.symlink_file(outputs[root_module_compressed_lib_assets], dynamic_info.root_module_compressed_lib_assets)
ctx.actions.symlink_file(outputs[non_root_module_metadata_assets], dynamic_info.non_root_module_metadata_assets)
- ctx.actions.symlink_file(outputs[non_root_module_lib_assets], dynamic_info.non_root_module_lib_assets)
+ ctx.actions.symlink_file(outputs[non_root_module_lib_assets], dynamic_info.non_root_module_lib_assets if dynamic_info.non_root_module_lib_assets else ctx.actions.symlinked_dir("empty_non_root_module_lib_assets", {}))
- ctx.actions.dynamic_output(dynamic = dynamic_inputs, inputs = [], outputs = dynamic_outputs, f = dynamic_native_libs_info)
+ ctx.actions.dynamic_output(dynamic = dynamic_inputs, inputs = [], outputs = [o.as_output() for o in dynamic_outputs], f = dynamic_native_libs_info)
all_native_libs = ctx.actions.symlinked_dir("debug_all_native_libs", {"others": native_libs, "primary": native_libs_always_in_primary_apk})
lib_subtargets = _create_library_subtargets(lib_outputs_by_platform, native_libs)
@@ -395,9 +418,10 @@ def get_android_binary_native_library_info(
shared_libraries = included_shared_lib_targets,
native_libs_for_primary_apk = native_libs_for_primary_apk,
exopackage_info = exopackage_info,
- root_module_native_lib_assets = [native_lib_assets_for_primary_apk, stripped_native_linkable_assets_for_primary_apk, root_module_metadata_assets, root_module_compressed_lib_assets],
+ root_module_native_lib_assets = [native_lib_assets_for_primary_apk, stripped_native_linkable_assets_for_primary_apk, root_module_metadata_assets],
non_root_module_native_lib_assets =
[non_root_module_metadata_assets, non_root_module_lib_assets], generated_java_code = generated_java_code, + unstripped_shared_libraries = unstripped_native_libraries_files, ) _NativeLibSubtargetArtifacts = record( @@ -526,12 +550,11 @@ _NativeLibsAndAssetsInfo = record( native_libs = Artifact, native_libs_metadata = Artifact, native_libs_always_in_primary_apk = Artifact, - native_lib_assets_for_primary_apk = [Artifact, None], - stripped_native_linkable_assets_for_primary_apk = [Artifact, None], + native_lib_assets_for_primary_apk = Artifact | None, + stripped_native_linkable_assets_for_primary_apk = Artifact | None, root_module_metadata_assets = Artifact, - root_module_compressed_lib_assets = Artifact, non_root_module_metadata_assets = Artifact, - non_root_module_lib_assets = Artifact, + non_root_module_lib_assets = [Artifact, None], ) def _get_exopackage_info( @@ -551,7 +574,6 @@ def _get_native_libs_and_assets( all_prebuilt_native_library_dirs: list[PrebuiltNativeLibraryDir], platform_to_native_linkables: dict[str, dict[str, SharedLibrary]]) -> _NativeLibsAndAssetsInfo: is_packaging_native_libs_as_assets_supported = getattr(ctx.attrs, "package_asset_libraries", False) - enabled_voltron_non_asset_libs = ctx.attrs._android_toolchain[AndroidToolchainInfo].enabled_voltron_non_asset_libs prebuilt_native_library_dirs = [] prebuilt_native_library_dirs_always_in_primary_apk = [] @@ -572,10 +594,8 @@ def _get_native_libs_and_assets( if not is_root_module(module): if native_lib.is_asset: prebuilt_native_library_dir_module_assets_map.setdefault(module, []).append(native_lib) - elif enabled_voltron_non_asset_libs: - prebuilt_native_library_dir_module_libs_map.setdefault(module, []).append(native_lib) else: - prebuilt_native_library_dirs.append(native_lib) + prebuilt_native_library_dir_module_libs_map.setdefault(module, []).append(native_lib) elif native_lib.is_asset and is_packaging_native_libs_as_assets_supported: expect(not native_lib.for_primary_apk, "{} which is marked as needing to be in the primary APK cannot be an asset".format(native_lib_target)) prebuilt_native_library_dir_assets_for_primary_apk.append(native_lib) @@ -610,52 +630,36 @@ def _get_native_libs_and_assets( package_as_assets = True, module = module, )) - if enabled_voltron_non_asset_libs: - for module, native_lib_dir in prebuilt_native_library_dir_module_libs_map.items(): - native_lib_module_assets_map.setdefault(module, []).append(_filter_prebuilt_native_library_dir( - ctx, - native_lib_dir, - "native_lib_libs_for_module_{}".format(module), - package_as_assets = False, - module = module, - )) + for module, native_lib_dir in prebuilt_native_library_dir_module_libs_map.items(): + native_lib_module_assets_map.setdefault(module, []).append(_filter_prebuilt_native_library_dir( + ctx, + native_lib_dir, + "native_lib_libs_for_module_{}".format(module), + package_as_assets = False, + module = module, + )) stripped_linkables = _get_native_linkables(ctx, platform_to_native_linkables, get_module_from_target, is_packaging_native_libs_as_assets_supported) for module, native_linkable_assets in stripped_linkables.linkable_module_assets_map.items(): native_lib_module_assets_map.setdefault(module, []).append(native_linkable_assets) root_module_metadata_srcs = {} - root_module_compressed_lib_srcs = {} non_root_module_metadata_srcs = {} - non_root_module_compressed_lib_srcs = {} - non_root_module_uncompressed_libs = [] + non_root_module_libs_srcs = [] assets_for_primary_apk = filter(None, [native_lib_assets_for_primary_apk, 
stripped_linkables.linkable_assets_for_primary_apk]) stripped_linkable_assets_for_primary_apk = stripped_linkables.linkable_assets_for_primary_apk if assets_for_primary_apk: - metadata_file, native_library_paths = _get_native_libs_as_assets_metadata(ctx, assets_for_primary_apk, ROOT_MODULE) + metadata_file = _get_native_libs_as_assets_metadata(ctx, assets_for_primary_apk, ROOT_MODULE) root_module_metadata_srcs[paths.join(_get_native_libs_as_assets_dir(ROOT_MODULE), "metadata.txt")] = metadata_file - if ctx.attrs.compress_asset_libraries: - compressed_lib_dir = _get_compressed_native_libs_as_assets(ctx, assets_for_primary_apk, native_library_paths, ROOT_MODULE) - root_module_compressed_lib_srcs[_get_native_libs_as_assets_dir(ROOT_MODULE)] = compressed_lib_dir - - # Since we're storing these as compressed assets, we need to ignore the uncompressed libs. - native_lib_assets_for_primary_apk = None - stripped_linkable_assets_for_primary_apk = None for module, native_lib_assets in native_lib_module_assets_map.items(): - metadata_file, native_library_paths = _get_native_libs_as_assets_metadata(ctx, native_lib_assets, module) - libs_metadata_path = "libs.txt" - if ctx.attrs._android_toolchain[AndroidToolchainInfo].enabled_voltron_non_asset_libs: - libs_metadata_path = paths.join("assets", "libs.txt") + metadata_file = _get_native_libs_as_assets_metadata(ctx, native_lib_assets, module) + libs_metadata_path = paths.join("assets", "libs.txt") non_root_module_metadata_srcs[paths.join(_get_native_libs_as_assets_dir(module), libs_metadata_path)] = metadata_file - if ctx.attrs.compress_asset_libraries: - compressed_lib_dir = _get_compressed_native_libs_as_assets(ctx, native_lib_assets, native_library_paths, module) - non_root_module_compressed_lib_srcs[_get_native_libs_as_assets_dir(module)] = compressed_lib_dir - else: - non_root_module_uncompressed_libs.extend(native_lib_assets) + non_root_module_libs_srcs.extend(native_lib_assets) - if non_root_module_uncompressed_libs: - expect(not non_root_module_compressed_lib_srcs, "Cannot have both uncompressed and compressed native libraries for a non-root module") + non_root_module_libs = None + if non_root_module_libs_srcs: non_root_module_libs = ctx.actions.declare_output("non_root_module_libs") ctx.actions.run( cmd_args([ @@ -663,11 +667,9 @@ def _get_native_libs_and_assets( "--output-dir", non_root_module_libs.as_output(), "--library-dirs", - ] + non_root_module_uncompressed_libs), + ] + non_root_module_libs_srcs), category = "combine_non_root_module_native_libs", ) - else: - non_root_module_libs = ctx.actions.symlinked_dir("non_root_module_libs", non_root_module_compressed_lib_srcs) combined_native_libs = ctx.actions.declare_output("combined_native_libs", dir = True) native_libs_metadata = ctx.actions.declare_output("native_libs_metadata.txt") @@ -699,7 +701,6 @@ def _get_native_libs_and_assets( native_lib_assets_for_primary_apk = native_lib_assets_for_primary_apk, stripped_native_linkable_assets_for_primary_apk = stripped_linkable_assets_for_primary_apk, root_module_metadata_assets = ctx.actions.symlinked_dir("root_module_metadata_assets", root_module_metadata_srcs), - root_module_compressed_lib_assets = ctx.actions.symlinked_dir("root_module_compressed_lib_assets", root_module_compressed_lib_srcs), non_root_module_metadata_assets = ctx.actions.symlinked_dir("non_root_module_metadata_assets", non_root_module_metadata_srcs), non_root_module_lib_assets = non_root_module_libs, ) @@ -713,20 +714,16 @@ def _filter_prebuilt_native_library_dir( cpu_filters = 
ctx.attrs.cpu_filters or CPU_FILTER_TO_ABI_DIRECTORY.keys() abis = [CPU_FILTER_TO_ABI_DIRECTORY[cpu] for cpu in cpu_filters] filter_tool = ctx.attrs._android_toolchain[AndroidToolchainInfo].filter_prebuilt_native_library_dir[RunInfo] - native_libs_dirs = [native_lib.dir for native_lib in native_libs] - native_libs_dirs_file = ctx.actions.write("{}_list.txt".format(identifier), native_libs_dirs) + native_libs_dirs_file = argfile(actions = ctx.actions, name = "{}_list.txt".format(identifier), args = [native_lib.dir for native_lib in native_libs]) base_output_dir = ctx.actions.declare_output(identifier, dir = True) - if ctx.attrs._android_toolchain[AndroidToolchainInfo].enabled_voltron_non_asset_libs: - if module == ROOT_MODULE: - output_dir = base_output_dir.project(_get_native_libs_as_assets_dir(module)) if package_as_assets else base_output_dir - elif package_as_assets: - output_dir = base_output_dir.project(paths.join(_get_native_libs_as_assets_dir(module), "assets")) - else: - output_dir = base_output_dir.project(paths.join(_get_native_libs_as_assets_dir(module), "lib")) - else: + if module == ROOT_MODULE: output_dir = base_output_dir.project(_get_native_libs_as_assets_dir(module)) if package_as_assets else base_output_dir + elif package_as_assets: + output_dir = base_output_dir.project(paths.join(_get_native_libs_as_assets_dir(module), "assets")) + else: + output_dir = base_output_dir.project(paths.join(_get_native_libs_as_assets_dir(module), "lib")) ctx.actions.run( - cmd_args([filter_tool, native_libs_dirs_file, output_dir.as_output(), "--abis"] + abis).hidden(native_libs_dirs), + cmd_args([filter_tool, native_libs_dirs_file, output_dir.as_output(), "--abis"] + abis), category = "filter_prebuilt_native_library_dir", identifier = identifier, ) @@ -736,7 +733,7 @@ def _filter_prebuilt_native_library_dir( _StrippedNativeLinkables = record( linkables = Artifact, linkables_always_in_primary_apk = Artifact, - linkable_assets_for_primary_apk = [Artifact, None], + linkable_assets_for_primary_apk = Artifact | None, linkable_module_assets_map = dict[str, Artifact], ) @@ -745,7 +742,6 @@ def _get_native_linkables( platform_to_native_linkables: dict[str, dict[str, SharedLibrary]], get_module_from_target: typing.Callable, package_native_libs_as_assets_enabled: bool) -> _StrippedNativeLinkables: - enabled_voltron_non_asset_libs = ctx.attrs._android_toolchain[AndroidToolchainInfo].enabled_voltron_non_asset_libs stripped_native_linkables_srcs = {} stripped_native_linkables_always_in_primary_apk_srcs = {} stripped_native_linkable_assets_for_primary_apk_srcs = {} @@ -771,30 +767,22 @@ def _get_native_linkables( not native_linkable.for_primary_apk or not native_linkable.can_be_asset, "{} which is marked as needing to be in the primary APK cannot be an asset".format(native_linkable_target), ) - if native_linkable.can_be_asset and not is_root_module(module): - if enabled_voltron_non_asset_libs: - native_libs_assets_dir = paths.join(_get_native_libs_as_assets_dir(module), "assets") - else: - native_libs_assets_dir = _get_native_libs_as_assets_dir(module) - so_name_path = paths.join(native_libs_assets_dir, abi_directory, so_name) - stripped_native_linkable_module_assets_srcs.setdefault(module, {})[so_name_path] = lib - elif native_linkable.can_be_asset and package_native_libs_as_assets_enabled: - if enabled_voltron_non_asset_libs: - native_libs_assets_dir = paths.join(_get_native_libs_as_assets_dir(module), "assets") + + if is_root_module(module): + if native_linkable.can_be_asset and 
package_native_libs_as_assets_enabled: + native_libs_assets_dir = paths.join(_get_native_libs_as_assets_dir(module)) + so_name_path = paths.join(native_libs_assets_dir, abi_directory, so_name) + stripped_native_linkable_assets_for_primary_apk_srcs[so_name_path] = lib else: - native_libs_assets_dir = _get_native_libs_as_assets_dir(module) - so_name_path = paths.join(native_libs_assets_dir, abi_directory, so_name) - stripped_native_linkable_assets_for_primary_apk_srcs[so_name_path] = lib - elif (enabled_voltron_non_asset_libs and # TODO: when cleaning up this code, restructure if statements to be more clear (start with root module, then non-root module cases) - not native_linkable.can_be_asset and not is_root_module(module)): - so_name_path = paths.join(_get_native_libs_as_assets_dir(module), "lib", abi_directory, so_name) - stripped_native_linkable_module_assets_srcs.setdefault(module, {})[so_name_path] = lib + so_name_path = paths.join(abi_directory, so_name) + if native_linkable.for_primary_apk: + stripped_native_linkables_always_in_primary_apk_srcs[so_name_path] = lib + else: + stripped_native_linkables_srcs[so_name_path] = lib else: - so_name_path = paths.join(abi_directory, so_name) - if native_linkable.for_primary_apk: - stripped_native_linkables_always_in_primary_apk_srcs[so_name_path] = lib - else: - stripped_native_linkables_srcs[so_name_path] = lib + module_dir = "assets" if native_linkable.can_be_asset else "lib" + so_name_path = paths.join(_get_native_libs_as_assets_dir(module), module_dir, abi_directory, so_name) + stripped_native_linkable_module_assets_srcs.setdefault(module, {})[so_name_path] = lib stripped_native_linkables = ctx.actions.symlinked_dir( "stripped_native_linkables", @@ -825,41 +813,22 @@ def _get_native_linkables( def _get_native_libs_as_assets_metadata( ctx: AnalysisContext, native_lib_assets: list[Artifact], - module: str) -> (Artifact, Artifact): - native_lib_assets_file = ctx.actions.write("{}/native_lib_assets".format(module), [cmd_args([native_lib_asset, _get_native_libs_as_assets_dir(module)], delimiter = "/") for native_lib_asset in native_lib_assets]) + module: str) -> Artifact: + native_lib_assets_file = argfile( + actions = ctx.actions, + name = "{}/native_lib_assets".format(module), + args = [cmd_args([native_lib_asset, _get_native_libs_as_assets_dir(module)], delimiter = "/") for native_lib_asset in native_lib_assets], + ) metadata_output = ctx.actions.declare_output("{}/native_libs_as_assets_metadata.txt".format(module)) - native_library_paths = ctx.actions.declare_output("{}/native_libs_as_assets_paths.txt".format(module)) metadata_cmd = cmd_args([ ctx.attrs._android_toolchain[AndroidToolchainInfo].native_libs_as_assets_metadata[RunInfo], "--native-library-dirs", native_lib_assets_file, "--metadata-output", metadata_output.as_output(), - "--native-library-paths-output", - native_library_paths.as_output(), - ]).hidden(native_lib_assets) + ]) ctx.actions.run(metadata_cmd, category = "get_native_libs_as_assets_metadata", identifier = module) - return metadata_output, native_library_paths - -def _get_compressed_native_libs_as_assets( - ctx: AnalysisContext, - native_lib_assets: list[Artifact], - native_library_paths: Artifact, - module: str) -> Artifact: - output_dir = ctx.actions.declare_output("{}/compressed_native_libs_as_assets_dir".format(module)) - compressed_libraries_cmd = cmd_args([ - ctx.attrs._android_toolchain[AndroidToolchainInfo].compress_libraries[RunInfo], - "--libraries", - native_library_paths, - "--output-dir", - 
output_dir.as_output(), - "--compression-type", - ctx.attrs.asset_compression_algorithm or "xz", - "--xz-compression-level", - str(ctx.attrs.xz_compression_level), - ]).hidden(native_lib_assets) - ctx.actions.run(compressed_libraries_cmd, category = "compress_native_libs_as_assets", identifier = module) - return output_dir + return metadata_output def _get_native_libs_as_assets_dir(module: str) -> str: return "assets/{}".format("lib" if is_root_module(module) else module) @@ -870,8 +839,8 @@ def get_default_shared_libs(ctx: AnalysisContext, deps: list[Dependency], shared deps = filter(None, [x.get(SharedLibraryInfo) for x in deps]), ) return { - so_name: shared_lib - for so_name, shared_lib in traverse_shared_library_info(shared_library_info).items() + soname: shared_lib + for soname, shared_lib in with_unique_str_sonames(traverse_shared_library_info(shared_library_info)).items() if not (shared_libraries_to_exclude and shared_libraries_to_exclude.contains(shared_lib.label.raw_target())) } @@ -987,12 +956,6 @@ def run_mergemap_codegen(ctx: AnalysisContext, merged_library_map: Artifact) -> ctx.actions.run(args, category = "mergemap_codegen") return mapping_java -def expect_dedupe(v): - # asserts that the input list is unique - o = dedupe_by_value(v) - expect(len(o) == len(v), "expected `{}` to be a list of unique items, but it wasn't. deduped list was `{}`.", v, o) - return v - # We can't merge a prebuilt shared (that has no archive) and must use its original info. # Ideally this would probably be structured info on the linkablenode. def _is_prebuilt_shared(node_data: LinkableNode) -> bool: @@ -1044,7 +1007,7 @@ def _shared_lib_for_prebuilt_shared( transitive_linkable_cache: dict[Label, bool], platform: [str, None] = None) -> SharedLibrary: expect( - len(node_data.shared_libs) == 1, + len(node_data.shared_libs.libraries) == 1, "unexpected shared_libs length for somerge of {} ({})".format(target, node_data.shared_libs), ) @@ -1062,9 +1025,11 @@ def _shared_lib_for_prebuilt_shared( "prebuilt shared library `{}` with exported_deps not supported by somerge".format(target), ) - soname, shlib = node_data.shared_libs.items()[0] + shlib = node_data.shared_libs.libraries[0] + soname = shlib.soname.ensure_str() + shlib = shlib.lib output_path = _platform_output_path(soname, platform) - return SharedLibrary( + return create_shlib( lib = shlib, stripped_lib = strip_lib(ctx, cxx_toolchain, shlib.output, output_path), link_args = None, @@ -1245,8 +1210,8 @@ def _get_merged_linkables_for_platform( # exported linker flags for shared libs are in their linkinfo itself and are not exported from dependents exported_linker_flags = None, ) - group_shared_libs[shared_lib.soname] = MergedSharedLibrary( - soname = shared_lib.soname, + group_shared_libs[shared_lib.soname.ensure_str()] = MergedSharedLibrary( + soname = shared_lib.soname.ensure_str(), lib = shared_lib, apk_module = group_data.apk_module, solib_constituents = [], @@ -1269,7 +1234,7 @@ def _get_merged_linkables_for_platform( expect(target_to_link_group[key] == group) node = linkable_nodes[key] - default_solibs = list(node.shared_libs.keys()) + default_solibs = list([shlib.soname.ensure_str() for shlib in node.shared_libs.libraries]) if not default_solibs and node.preferred_linkage == Linkage("static"): default_solibs = [node.default_soname] @@ -1325,7 +1290,7 @@ def _get_merged_linkables_for_platform( soname = soname, link_args = link_args, cxx_toolchain = cxx_toolchain, - shared_lib_deps = [link_group_linkable_nodes[label].shared_lib.soname for label 
in shlib_deps], + shared_lib_deps = [link_group_linkable_nodes[label].shared_lib.soname.ensure_str() for label in shlib_deps], label = group_data.constituents[0], can_be_asset = can_be_asset, ) @@ -1405,12 +1370,12 @@ def _create_all_relinkable_links( final = final_platform_to_native_linkables[platform][soname] original_args, _ = ctx.actions.write( "{}/{}/original.args".format(platform, soname), - [unpack_link_args(args, True, LinkOrdering("topological")) for args in lib.link_args] if lib.link_args else "", + [unpack_link_args(args, LinkOrdering("topological")) for args in lib.link_args] if lib.link_args else "", allow_args = True, ) final_args, _ = ctx.actions.write( "{}/{}/final.args".format(platform, soname), - [unpack_link_args(args, True, LinkOrdering("topological")) for args in final.link_args] if final.link_args else "", + [unpack_link_args(args, LinkOrdering("topological")) for args in final.link_args] if final.link_args else "", allow_args = True, ) debug_outputs["{}/{}/original.args".format(platform, soname)] = original_args @@ -1470,16 +1435,16 @@ def _create_relinkable_links( can_be_asset = node.can_be_asset, ) shared_lib_overrides[target] = LinkInfo( - name = shared_lib.soname, + name = shared_lib.soname.ensure_str(), pre_flags = node.linker_flags.exported_flags, linkables = [SharedLibLinkable( lib = shared_lib.lib.output, )], post_flags = node.linker_flags.exported_post_flags, ) - shared_libs[shared_lib.soname] = shared_lib + shared_libs[shared_lib.soname.ensure_str()] = shared_lib - return {lib.soname: lib for lib in shared_libs.values()}, debug_link_deps + return {lib.soname.ensure_str(): lib for lib in shared_libs.values()}, debug_link_deps # To support migration from a tset-based link strategy, we are trying to match buck's internal tset # traversal logic here. Look for implementation of TopologicalTransitiveSetIteratorGen @@ -1488,7 +1453,7 @@ def _rust_matching_topological_traversal( get_nodes_to_traverse_func: typing.Callable) -> list[typing.Any]: counts = {} - for label in breadth_first_traversal_by(None, roots, get_nodes_to_traverse_func): + for label in depth_first_traversal_by(None, roots, get_nodes_to_traverse_func, GraphTraversal("preorder-right-to-left")): for dep in get_nodes_to_traverse_func(label): if dep in counts: counts[dep] += 1 @@ -1649,6 +1614,20 @@ def _create_merged_link_args( # 5. extract the list of undefined symbols in the relinked libs (i.e. those symbols needed from dependencies and what had been # used in (1) above from higher nodes). 
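
The hunk below wires a new `relinker_extra_deps` attribute into this relink flow: each element is a platform-keyed dict produced by a split transition, every dep must expose a `LinkableGraph` whose root node is a static-preferred linkable, and that node's `pic_archive` LinkInfo is link-wholed into each relinked library after the generated version script. A minimal sketch of how such a dep might be declared, with hypothetical target names and other required attrs omitted:

    # Hypothetical BUCK snippet. The dep must prefer static linkage so the
    # relinker can link-whole its pic archive into every relinked .so.
    cxx_library(
        name = "relinker_stubs",  # hypothetical
        srcs = ["stubs.cpp"],
        preferred_linkage = "static",
    )

    android_binary(
        name = "app",
        relinker_extra_deps = [":relinker_stubs"],
        # other required attrs omitted
    )
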
def relink_libraries(ctx: AnalysisContext, libraries_by_platform: dict[str, dict[str, SharedLibrary]]) -> dict[str, dict[str, SharedLibrary]]: + relinker_extra_deps = getattr(ctx.attrs, "relinker_extra_deps", None) + red_linkables = {} + if relinker_extra_deps: + for red_elem in relinker_extra_deps: + for platform, red in red_elem.items(): + red_link_graph = red.get(LinkableGraph) + expect(red_link_graph != None, "relinker_extra_deps (`{}`) should be a linkable target", red.label) + red_linkable = red_link_graph.nodes.value.linkable + expect(red_linkable != None, "relinker_extra_deps (`{}`) should be a linkable target", red.label) + expect(red_linkable.preferred_linkage == Linkage("static"), "buck2 currently only supports preferred_linkage='static' relinker_extra_deps") + if platform not in red_linkables: + red_linkables[platform] = [] + red_linkables[platform].append((red.label, red_linkable.link_infos[LibOutputStyle("pic_archive")].default)) + relinked_libraries_by_platform = {} for platform, shared_libraries in libraries_by_platform.items(): cxx_toolchain = ctx.attrs._cxx_toolchain[platform][CxxToolchainInfo] @@ -1685,7 +1664,11 @@ def relink_libraries(ctx: AnalysisContext, libraries_by_platform: dict[str, dict provided_symbols = provided_symbols_file, needed_symbols = needed_symbols_for_this, ) - relinker_link_args = original_shared_library.link_args + [LinkArgs(flags = [cmd_args(relinker_version_script, format = "-Wl,--version-script={}")])] + relinker_link_args = ( + original_shared_library.link_args + + [LinkArgs(flags = [cmd_args(relinker_version_script, format = "-Wl,--version-script={}")])] + + ([LinkArgs(infos = [set_link_info_link_whole(red_linkable[1]) for red_linkable in red_linkables[platform]])] if len(red_linkables) > 0 else []) + ) shared_lib = create_shared_lib( ctx, @@ -1707,7 +1690,7 @@ def relink_libraries(ctx: AnalysisContext, libraries_by_platform: dict[str, dict return relinked_libraries_by_platform def extract_provided_symbols(ctx: AnalysisContext, toolchain: CxxToolchainInfo, lib: Artifact) -> Artifact: - return extract_global_syms(ctx, toolchain, lib, "relinker_extract_provided_symbols") + return extract_defined_syms(ctx, toolchain, lib, "relinker_extract_provided_symbols") def create_relinker_version_script(actions: AnalysisActions, relinker_allowlist: list[regex], output: Artifact, provided_symbols: Artifact, needed_symbols: list[Artifact]): def create_version_script(ctx, artifacts, outputs): @@ -1743,7 +1726,7 @@ def create_relinker_version_script(actions: AnalysisActions, relinker_allowlist: version_script += "};\n" ctx.actions.write(outputs[output], version_script) - actions.dynamic_output(dynamic = needed_symbols + [provided_symbols], inputs = [], outputs = [output], f = create_version_script) + actions.dynamic_output(dynamic = needed_symbols + [provided_symbols], inputs = [], outputs = [output.as_output()], f = create_version_script) def extract_undefined_symbols(ctx: AnalysisContext, toolchain: CxxToolchainInfo, lib: Artifact) -> Artifact: return extract_undefined_syms(ctx, toolchain, lib, "relinker_extract_undefined_symbols") @@ -1757,7 +1740,7 @@ def union_needed_symbols(actions: AnalysisActions, output: Artifact, needed_symb symbols = sorted(unioned_symbols.keys()) ctx.actions.write(outputs[output], symbols) - actions.dynamic_output(dynamic = needed_symbols, inputs = [], outputs = [output], f = compute_union) + actions.dynamic_output(dynamic = needed_symbols, inputs = [], outputs = [output.as_output()], f = compute_union) def strip_lib(ctx: 
AnalysisContext, cxx_toolchain: CxxToolchainInfo, shlib: Artifact, output_path: [str, None] = None): strip_flags = cmd_args(get_strip_non_global_flags(cxx_toolchain)) @@ -1802,7 +1785,7 @@ def create_shared_lib( ) shlib = link_result.linked_object - return SharedLibrary( + return create_shlib( lib = shlib, stripped_lib = strip_lib(ctx, cxx_toolchain, shlib.output), shlib_deps = shared_lib_deps, diff --git a/prelude/android/android_binary_resources_rules.bzl b/prelude/android/android_binary_resources_rules.bzl index 297d7defa33..b12240075c5 100644 --- a/prelude/android/android_binary_resources_rules.bzl +++ b/prelude/android/android_binary_resources_rules.bzl @@ -19,13 +19,14 @@ load( ) load("@prelude//utils:expect.bzl", "expect") load("@prelude//utils:set.bzl", "set_type") # @unused Used as a type +load("@prelude//utils:utils.bzl", "flatten") load("@prelude//decls/android_rules.bzl", "RType") _FilteredResourcesOutput = record( resource_infos = list[AndroidResourceInfo], voltron_res = list[Artifact], - override_symbols = [Artifact, None], - string_files_list = [Artifact, None], + override_symbols = Artifact | None, + string_files_list = Artifact | None, string_files_res_dirs = list[Artifact], ) @@ -36,7 +37,7 @@ def get_android_binary_resources_info( java_packaging_deps: list[JavaPackagingDep], use_proto_format: bool, referenced_resources_lists: list[Artifact], - apk_module_graph_file: [Artifact, None] = None, + apk_module_graph_file: Artifact | None = None, manifest_entries: dict = {}, resource_infos_to_exclude: [set_type, None] = None, r_dot_java_packages_to_exclude: [list[str], None] = [], @@ -241,15 +242,17 @@ def _maybe_filter_resources( filter_resources_cmd = cmd_args(android_toolchain.filter_resources[RunInfo]) in_res_dirs = res_to_out_res_dir.keys() - filter_resources_cmd.hidden(in_res_dirs) - filter_resources_cmd.hidden([out_res.as_output() for out_res in res_to_out_res_dir.values()]) + filter_resources_cmd.add(cmd_args( + hidden = + in_res_dirs + [out_res.as_output() for out_res in res_to_out_res_dir.values()], + )) filter_resources_cmd.add([ "--in-res-dir-to-out-res-dir-map", ctx.actions.write_json("in_res_dir_to_out_res_dir_map", {"res_dir_map": res_to_out_res_dir}), ]) if is_voltron_language_pack_enabled: - filter_resources_cmd.hidden([out_res.as_output() for out_res in voltron_res_to_out_res_dir.values()]) + filter_resources_cmd.add(cmd_args(hidden = [out_res.as_output() for out_res in voltron_res_to_out_res_dir.values()])) filter_resources_cmd.add([ "--voltron-in-res-dir-to-out-res-dir-map", ctx.actions.write_json("voltron_in_res_dir_to_out_res_dir_map", {"res_dir_map": voltron_res_to_out_res_dir}), @@ -367,7 +370,7 @@ def _maybe_generate_string_source_map( should_build_source_string_map: bool, res_dirs: list[Artifact], android_toolchain: AndroidToolchainInfo, - is_voltron_string_source_map: bool = False) -> [Artifact, None]: + is_voltron_string_source_map: bool = False) -> Artifact | None: if not should_build_source_string_map or len(res_dirs) == 0: return None @@ -380,7 +383,7 @@ def _maybe_generate_string_source_map( res_dirs_file, "--output", output.as_output(), - ]).hidden(res_dirs) + ], hidden = res_dirs) if is_voltron_string_source_map: generate_string_source_map_cmd.add("--is-voltron") @@ -391,10 +394,10 @@ def _maybe_generate_string_source_map( def _maybe_package_strings_as_assets( ctx: AnalysisContext, - string_files_list: [Artifact, None], + string_files_list: Artifact | None, string_files_res_dirs: list[Artifact], r_dot_txt: Artifact, - android_toolchain: 
AndroidToolchainInfo) -> [Artifact, None]: + android_toolchain: AndroidToolchainInfo) -> Artifact | None: resource_compression_mode = getattr(ctx.attrs, "resource_compression", "disabled") is_store_strings_as_assets = _is_store_strings_as_assets(resource_compression_mode) expect(is_store_strings_as_assets == (string_files_list != None)) @@ -420,7 +423,7 @@ def _maybe_package_strings_as_assets( string_assets_zip.as_output(), "--all-locales-string-assets-zip", all_locales_string_assets_zip.as_output(), - ]).hidden(string_files_res_dirs) + ], hidden = string_files_res_dirs) if locales: package_strings_as_assets_cmd.add("--locales", ",".join(locales)) @@ -481,7 +484,7 @@ def get_manifest( def _get_module_manifests( ctx: AnalysisContext, manifest_entries: dict, - apk_module_graph_file: [Artifact, None], + apk_module_graph_file: Artifact | None, use_proto_format: bool, primary_resources_apk: Artifact) -> list[Artifact]: if not apk_module_graph_file: @@ -533,7 +536,7 @@ def _get_module_manifests( ctx.actions.dynamic_output( dynamic = [apk_module_graph_file], inputs = [], - outputs = [module_manifests_dir], + outputs = [module_manifests_dir.as_output()], f = get_manifests_modular, ) @@ -546,13 +549,17 @@ def _merge_assets( is_exopackaged_enabled_for_resources: bool, base_apk: Artifact, resource_infos: list[AndroidResourceInfo], - cxx_resources: [Artifact, None], + cxx_resources: Artifact | None, is_bundle_build: bool, - apk_module_graph_file: [Artifact, None]) -> (Artifact, [Artifact, None], [Artifact, None], [Artifact, None]): + apk_module_graph_file: Artifact | None) -> (Artifact, Artifact | None, Artifact | None, Artifact | None): expect( not (is_exopackaged_enabled_for_resources and is_bundle_build), "Cannot use exopackage-for-resources with AAB builds.", ) + expect( + not (is_exopackaged_enabled_for_resources and apk_module_graph_file), + "Cannot use exopackage-for-resources with Voltron builds.", + ) asset_resource_infos = [resource_info for resource_info in resource_infos if resource_info.assets] if not asset_resource_infos and not cxx_resources: return base_apk, None, None, None @@ -561,7 +568,7 @@ def _merge_assets( def get_common_merge_assets_cmd( ctx: AnalysisContext, - output_apk: Artifact) -> (cmd_args, [Artifact, None]): + output_apk: Artifact) -> (cmd_args, Artifact | None): merge_assets_cmd = cmd_args(ctx.attrs._android_toolchain[AndroidToolchainInfo].merge_assets[RunInfo]) merge_assets_cmd.add(["--output-apk", output_apk.as_output()]) @@ -576,11 +583,18 @@ def _merge_assets( merge_assets_cmd.add(["--base-apk", base_apk]) merged_assets_output_hash = None + merge_assets_cmd.add("--binary-type", "aab" if is_bundle_build else "apk") + return merge_assets_cmd, merged_assets_output_hash - # For Voltron AAB builds, we need to put assets into a separate "APK" for each module. - if is_bundle_build and apk_module_graph_file: - module_assets_apks_dir = ctx.actions.declare_output("module_assets_apks") + if apk_module_graph_file: + declared_outputs = [merged_assets_output] + if is_bundle_build: + # For Voltron AAB builds, we need to put assets into a separate "APK" for each module. 
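
The restructured code below keeps the `dynamic_output` idiom used throughout these rules: the APK module graph is itself a build artifact, so the per-module layout is only known once that artifact can be read at action time. A minimal sketch of the idiom, assuming a hypothetical `merge_tool` RunInfo and a `module_graph` artifact are in scope; note that declared outputs are passed to `dynamic_output` as `.as_output()`, a fix several hunks in this diff also apply:

    out_dir = ctx.actions.declare_output("module_assets_apks")

    def _merge_modular(ctx, artifacts, outputs):
        # Runs only after `module_graph` has been built; its contents are
        # readable here, unlike at analysis time.
        modules = artifacts[module_graph].read_string().splitlines()
        ctx.actions.run(
            cmd_args(
                merge_tool,  # hypothetical RunInfo
                "--modules", ",".join(modules),
                "--output", outputs[out_dir].as_output(),
            ),
            category = "merge_assets_sketch",
        )

    ctx.actions.dynamic_output(
        dynamic = [module_graph],
        inputs = [],
        outputs = [out_dir.as_output()],
        f = _merge_modular,
    )
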
+ module_assets_apks_dir = ctx.actions.declare_output("module_assets_apks") + declared_outputs.append(module_assets_apks_dir) + else: + module_assets_apks_dir = None def merge_assets_modular(ctx: AnalysisContext, artifacts, outputs): apk_module_graph_info = get_apk_module_graph_info(ctx, apk_module_graph_file, artifacts) @@ -594,18 +608,19 @@ def _merge_assets( merge_assets_cmd, _ = get_common_merge_assets_cmd(ctx, outputs[merged_assets_output]) - merge_assets_cmd.add(["--module-assets-apks-dir", outputs[module_assets_apks_dir].as_output()]) + if is_bundle_build: + merge_assets_cmd.add(["--module-assets-apks-dir", outputs[module_assets_apks_dir].as_output()]) assets_dirs_file = ctx.actions.write_json("assets_dirs.json", module_to_assets_dirs) merge_assets_cmd.add(["--assets-dirs", assets_dirs_file]) - merge_assets_cmd.hidden([resource_info.assets for resource_info in asset_resource_infos]) + merge_assets_cmd.add(cmd_args(hidden = flatten(module_to_assets_dirs.values()))) ctx.actions.run(merge_assets_cmd, category = "merge_assets") ctx.actions.dynamic_output( dynamic = [apk_module_graph_file], inputs = [], - outputs = [module_assets_apks_dir, merged_assets_output], + outputs = [o.as_output() for o in declared_outputs], f = merge_assets_modular, ) @@ -619,7 +634,7 @@ def _merge_assets( assets_dirs.extend([cxx_resources]) assets_dirs_file = ctx.actions.write_json("assets_dirs.json", {ROOT_MODULE: assets_dirs}) merge_assets_cmd.add(["--assets-dirs", assets_dirs_file]) - merge_assets_cmd.hidden(assets_dirs) + merge_assets_cmd.add(cmd_args(hidden = assets_dirs)) ctx.actions.run(merge_assets_cmd, category = "merge_assets") @@ -647,7 +662,7 @@ def get_effective_banned_duplicate_resource_types( else: fail("Unrecognized duplicate_resource_behavior: {}".format(duplicate_resource_behavior)) -def get_cxx_resources(ctx: AnalysisContext, deps: list[Dependency], dir_name: str = "cxx_resources_dir") -> [Artifact, None]: +def get_cxx_resources(ctx: AnalysisContext, deps: list[Dependency], dir_name: str = "cxx_resources_dir") -> Artifact | None: cxx_resources = gather_resources( label = ctx.label, resources = {}, diff --git a/prelude/android/android_build_config.bzl b/prelude/android/android_build_config.bzl index 452b3132c6a..95ab62c55c7 100644 --- a/prelude/android/android_build_config.bzl +++ b/prelude/android/android_build_config.bzl @@ -54,7 +54,7 @@ def generate_android_build_config( java_package: str, use_constant_expressions: bool, default_values: list[BuildConfigField], - values_file: [Artifact, None]) -> (JavaLibraryInfo, JavaPackagingInfo, Artifact): + values_file: Artifact | None) -> (JavaLibraryInfo, JavaPackagingInfo, Artifact): build_config_dot_java = _generate_build_config_dot_java(ctx, source, java_package, use_constant_expressions, default_values, values_file) compiled_build_config_dot_java = _compile_and_package_build_config_dot_java(ctx, java_package, build_config_dot_java) @@ -76,7 +76,7 @@ def _generate_build_config_dot_java( java_package: str, use_constant_expressions: bool, default_values: list[BuildConfigField], - values_file: [Artifact, None]) -> Artifact: + values_file: Artifact | None) -> Artifact: generate_build_config_cmd = cmd_args(ctx.attrs._android_toolchain[AndroidToolchainInfo].generate_build_config[RunInfo]) generate_build_config_cmd.add([ "--source", diff --git a/prelude/android/android_bundle.bzl b/prelude/android/android_bundle.bzl index 849f242c29b..3168922a117 100644 --- a/prelude/android/android_bundle.bzl +++ b/prelude/android/android_bundle.bzl @@ -9,7 +9,10 @@ 
load("@prelude//:validation_deps.bzl", "get_validation_deps_outputs") load("@prelude//android:android_binary.bzl", "get_binary_info") load("@prelude//android:android_providers.bzl", "AndroidAabInfo", "AndroidBinaryNativeLibsInfo", "AndroidBinaryResourcesInfo", "DexFilesInfo") load("@prelude//android:android_toolchain.bzl", "AndroidToolchainInfo") +load("@prelude//android:bundletool_util.bzl", "derive_universal_apk") +load("@prelude//java:java_providers.bzl", "KeystoreInfo") load("@prelude//java/utils:java_more_utils.bzl", "get_path_separator_for_exec_os") +load("@prelude//utils:argfile.bzl", "argfile") def android_bundle_impl(ctx: AnalysisContext) -> list[Provider]: android_binary_info = get_binary_info(ctx, use_proto_format = True) @@ -23,11 +26,28 @@ def android_bundle_impl(ctx: AnalysisContext) -> list[Provider]: resources_info = android_binary_info.resources_info, bundle_config = ctx.attrs.bundle_config_file, validation_deps_outputs = get_validation_deps_outputs(ctx), + packaging_options = ctx.attrs.packaging_options, ) + sub_targets = {} + sub_targets.update(android_binary_info.sub_targets) + if ctx.attrs.use_derived_apk: + keystore = ctx.attrs.keystore[KeystoreInfo] + default_output = derive_universal_apk( + ctx, + android_toolchain = ctx.attrs._android_toolchain[AndroidToolchainInfo], + app_bundle = output_bundle, + keystore = keystore, + ) + sub_targets["aab"] = [DefaultInfo( + default_outputs = [output_bundle], + )] + else: + default_output = output_bundle + java_packaging_deps = android_binary_info.java_packaging_deps return [ - DefaultInfo(default_output = output_bundle, other_outputs = android_binary_info.materialized_artifacts, sub_targets = android_binary_info.sub_targets), + DefaultInfo(default_output = default_output, other_outputs = android_binary_info.materialized_artifacts, sub_targets = sub_targets), AndroidAabInfo(aab = output_bundle, manifest = android_binary_info.resources_info.manifest, materialized_artifacts = android_binary_info.materialized_artifacts), TemplatePlaceholderInfo( keyed_variables = { @@ -44,11 +64,12 @@ def build_bundle( dex_files_info: DexFilesInfo, native_library_info: AndroidBinaryNativeLibsInfo, resources_info: AndroidBinaryResourcesInfo, - bundle_config: [Artifact, None], - validation_deps_outputs: [list[Artifact], None] = None) -> Artifact: + bundle_config: Artifact | None, + validation_deps_outputs: [list[Artifact], None] = None, + packaging_options: dict | None = None) -> Artifact: output_bundle = actions.declare_output("{}.aab".format(label.name)) - bundle_builder_args = cmd_args([ + bundle_builder_args = cmd_args( android_toolchain.bundle_builder[RunInfo], "--output-bundle", output_bundle.as_output(), @@ -56,12 +77,10 @@ def build_bundle( resources_info.primary_resources_apk, "--dex-file", dex_files_info.primary_dex, - ]) - - # The outputs of validation_deps need to be added as hidden arguments - # to an action for the validation_deps targets to be built and enforced. - if validation_deps_outputs: - bundle_builder_args.hidden(validation_deps_outputs) + # The outputs of validation_deps need to be added as hidden arguments + # to an action for the validation_deps targets to be built and enforced. 
+ hidden = validation_deps_outputs or [], + ) if bundle_config: bundle_builder_args.add(["--path-to-bundle-config-file", bundle_config]) @@ -70,28 +89,16 @@ def build_bundle( bundle_builder_args.add("--package-meta-inf-version-files") root_module_asset_directories = native_library_info.root_module_native_lib_assets + dex_files_info.root_module_secondary_dex_dirs - root_module_asset_directories_file = actions.write("root_module_asset_directories.txt", root_module_asset_directories) - bundle_builder_args.hidden(root_module_asset_directories) - - if android_toolchain.enabled_voltron_non_asset_libs: - non_root_module_asset_directories = resources_info.module_manifests + dex_files_info.non_root_module_secondary_dex_dirs - non_root_module_asset_directories_file = actions.write("non_root_module_asset_directories.txt", non_root_module_asset_directories) - bundle_builder_args.hidden(non_root_module_asset_directories) - non_root_module_asset_native_lib_directories = actions.write("non_root_module_asset_native_lib_directories.txt", native_library_info.non_root_module_native_lib_assets) - bundle_builder_args.hidden(native_library_info.non_root_module_native_lib_assets) - else: - non_root_module_asset_directories = resources_info.module_manifests + native_library_info.non_root_module_native_lib_assets + dex_files_info.non_root_module_secondary_dex_dirs - non_root_module_asset_directories_file = actions.write("non_root_module_asset_directories.txt", non_root_module_asset_directories) - bundle_builder_args.hidden(non_root_module_asset_directories) - non_root_module_asset_native_lib_directories = actions.write("non_root_module_asset_native_lib_directories.txt", "") + root_module_asset_directories_file = argfile(actions = actions, name = "root_module_asset_directories.txt", args = root_module_asset_directories) - native_library_directories = actions.write("native_library_directories", native_library_info.native_libs_for_primary_apk) - bundle_builder_args.hidden(native_library_info.native_libs_for_primary_apk) + non_root_module_asset_directories = resources_info.module_manifests + dex_files_info.non_root_module_secondary_dex_dirs + non_root_module_asset_directories_file = argfile(actions = actions, name = "non_root_module_asset_directories.txt", args = non_root_module_asset_directories) + non_root_module_asset_native_lib_directories = argfile(actions = actions, name = "non_root_module_asset_native_lib_directories.txt", args = native_library_info.non_root_module_native_lib_assets) + + native_library_directories = argfile(actions = actions, name = "native_library_directories", args = native_library_info.native_libs_for_primary_apk) all_zip_files = [resources_info.packaged_string_assets] if resources_info.packaged_string_assets else [] - zip_files = actions.write("zip_files", all_zip_files) - bundle_builder_args.hidden(all_zip_files) - jar_files_that_may_contain_resources = actions.write("jar_files_that_may_contain_resources", resources_info.jar_files_that_may_contain_resources) - bundle_builder_args.hidden(resources_info.jar_files_that_may_contain_resources) + zip_files = argfile(actions = actions, name = "zip_files", args = all_zip_files) + jar_files_that_may_contain_resources = argfile(actions = actions, name = "jar_files_that_may_contain_resources", args = resources_info.jar_files_that_may_contain_resources) if resources_info.module_assets: bundle_builder_args.add(["--module-assets-dir", resources_info.module_assets]) @@ -113,6 +120,13 @@ def build_bundle( android_toolchain.zipalign[RunInfo], ]) + if 
packaging_options: + for key, value in packaging_options.items(): + if key != "excluded_resources": + fail("Only 'excluded_resources' is supported in packaging_options right now!") + else: + bundle_builder_args.add("--excluded-resources", actions.write("excluded_resources.txt", value)) + actions.run(bundle_builder_args, category = "bundle_build") return output_bundle diff --git a/prelude/android/android_instrumentation_apk.bzl b/prelude/android/android_instrumentation_apk.bzl index d534f5e33f7..f7eb09a8d31 100644 --- a/prelude/android/android_instrumentation_apk.bzl +++ b/prelude/android/android_instrumentation_apk.bzl @@ -12,14 +12,20 @@ load("@prelude//android:android_providers.bzl", "AndroidApkInfo", "AndroidApkUnd load("@prelude//android:android_toolchain.bzl", "AndroidToolchainInfo") load("@prelude//android:configuration.bzl", "get_deps_by_platform") load("@prelude//android:dex_rules.bzl", "get_multi_dex", "get_single_primary_dex", "get_split_dex_merge_config", "merge_to_single_dex", "merge_to_split_dex") +load("@prelude//android:preprocess_java_classes.bzl", "get_preprocessed_java_classes") load("@prelude//android:util.bzl", "create_enhancement_context") +load("@prelude//java:class_to_srcs.bzl", "merge_class_to_source_map_from_jar") load("@prelude//java:java_providers.bzl", "create_java_packaging_dep", "get_all_java_packaging_deps") +load("@prelude//java:java_toolchain.bzl", "JavaToolchainInfo") load("@prelude//java/utils:java_utils.bzl", "get_class_to_source_map_info") load("@prelude//utils:expect.bzl", "expect") def android_instrumentation_apk_impl(ctx: AnalysisContext): _verify_params(ctx) + # jar preprocessing cannot be used when the jars were dexed already, so we have to disable predex when we want to preprocess the jars. + disable_pre_dex = ctx.attrs.disable_pre_dex or ctx.attrs.preprocess_java_classes_bash + apk_under_test_info = ctx.attrs.apk[AndroidApkUnderTestInfo] # android_instrumentation_apk uses the same platforms as the APK-under-test @@ -65,7 +71,10 @@ def android_instrumentation_apk_impl(ctx: AnalysisContext): for r_dot_java in resources_info.r_dot_java_infos ] - if not ctx.attrs.disable_pre_dex: + enhance_ctx = create_enhancement_context(ctx) + sub_targets = enhance_ctx.get_sub_targets() + materialized_artifacts = [] + if not disable_pre_dex: pre_dexed_libs = [java_packaging_dep.dex for java_packaging_dep in java_packaging_deps] if ctx.attrs.use_split_dex: dex_merge_config = get_split_dex_merge_config(ctx, android_toolchain) @@ -79,6 +88,10 @@ def android_instrumentation_apk_impl(ctx: AnalysisContext): dex_files_info = merge_to_single_dex(ctx, android_toolchain, pre_dexed_libs) else: jars_to_owners = {packaging_dep.jar: packaging_dep.jar.owner.raw_target() for packaging_dep in java_packaging_deps} + if ctx.attrs.preprocess_java_classes_bash: + jars_to_owners, materialized_artifacts_dir = get_preprocessed_java_classes(enhance_ctx, jars_to_owners) + if materialized_artifacts_dir: + materialized_artifacts.append(materialized_artifacts_dir) if ctx.attrs.use_split_dex: dex_files_info = get_multi_dex( ctx, @@ -93,7 +106,6 @@ def android_instrumentation_apk_impl(ctx: AnalysisContext): jars_to_owners.keys(), ) - enhance_ctx = create_enhancement_context(ctx) native_library_info = get_android_binary_native_library_info( enhance_ctx, android_packageable_info, @@ -112,16 +124,24 @@ def android_instrumentation_apk_impl(ctx: AnalysisContext): resources_info = resources_info, ) - class_to_srcs, _ = get_class_to_source_map_info( + class_to_srcs, _, class_to_srcs_subtargets = 
get_class_to_source_map_info( ctx, outputs = None, deps = deps, ) + transitive_class_to_src_map = merge_class_to_source_map_from_jar( + actions = ctx.actions, + name = ctx.label.name + ".transitive_class_to_src.json", + java_toolchain = ctx.attrs._java_toolchain[JavaToolchainInfo], + relative_to = None, + deps = [class_to_srcs], + ) + sub_targets["transitive_class_to_src_map"] = [DefaultInfo(default_output = transitive_class_to_src_map)] return [ - AndroidApkInfo(apk = output_apk, manifest = resources_info.manifest), + AndroidApkInfo(apk = output_apk, materialized_artifacts = materialized_artifacts, manifest = resources_info.manifest), AndroidInstrumentationApkInfo(apk_under_test = ctx.attrs.apk[AndroidApkInfo].apk), - DefaultInfo(default_output = output_apk, sub_targets = enhance_ctx.get_sub_targets()), + DefaultInfo(default_output = output_apk, other_outputs = materialized_artifacts, sub_targets = sub_targets | class_to_srcs_subtargets), class_to_srcs, ] diff --git a/prelude/android/android_instrumentation_test.bzl b/prelude/android/android_instrumentation_test.bzl index 3b64d459925..a1a373991f3 100644 --- a/prelude/android/android_instrumentation_test.bzl +++ b/prelude/android/android_instrumentation_test.bzl @@ -8,8 +8,17 @@ load("@prelude//android:android_providers.bzl", "AndroidApkInfo", "AndroidInstrumentationApkInfo") load("@prelude//android:android_toolchain.bzl", "AndroidToolchainInfo") load("@prelude//java:class_to_srcs.bzl", "JavaClassToSourceMapInfo") +load("@prelude//java:java_providers.bzl", "JavaPackagingInfo", "get_all_java_packaging_deps_tset") load("@prelude//java:java_toolchain.bzl", "JavaToolchainInfo") load("@prelude//java/utils:java_more_utils.bzl", "get_path_separator_for_exec_os") +load( + "@prelude//linking:shared_libraries.bzl", + "SharedLibraryInfo", + "create_shlib_symlink_tree", + "merge_shared_libraries", + "traverse_shared_library_info", +) +load("@prelude//utils:argfile.bzl", "at_argfile") load("@prelude//utils:expect.bzl", "expect") load("@prelude//test/inject_test_run_info.bzl", "inject_test_run_info") @@ -17,22 +26,42 @@ ANDROID_EMULATOR_ABI_LABEL_PREFIX = "tpx-re-config::" DEFAULT_ANDROID_SUBPLATFORM = "android-30" DEFAULT_ANDROID_PLATFORM = "android-emulator" DEFAULT_ANDROID_INSTRUMENTATION_TESTS_USE_CASE = "instrumentation-tests" +RIOT_USE_CASE = "riot" +SUPPORTED_POOLS = ["EUREKA_POOL", "HOLLYWOOD_POOL", "STAGE_DELPHI_POOL"] +SUPPORTED_PLATFORMS = ["riot", "android-emulator"] +SUPPORTED_USE_CASES = [RIOT_USE_CASE, DEFAULT_ANDROID_INSTRUMENTATION_TESTS_USE_CASE] def android_instrumentation_test_impl(ctx: AnalysisContext): android_toolchain = ctx.attrs._android_toolchain[AndroidToolchainInfo] - cmd = [ctx.attrs._java_toolchain[JavaToolchainInfo].java_for_tests] + cmd = [ctx.attrs._java_test_toolchain[JavaToolchainInfo].java_for_tests] classpath = android_toolchain.instrumentation_test_runner_classpath classpath_args = cmd_args() classpath_args.add("-classpath") + env = ctx.attrs.env or {} extra_classpath = [] if ctx.attrs.instrumentation_test_listener != None: - extra_classpath.append(ctx.attrs.instrumentation_test_listener) - classpath_args.add(cmd_args(classpath + extra_classpath, delimiter = get_path_separator_for_exec_os(ctx))) - classpath_args_file = ctx.actions.write("classpath_args_file", classpath_args) - cmd.append(cmd_args(classpath_args_file, format = "@{}").hidden(classpath_args)) + extra_classpath.extend([ + get_all_java_packaging_deps_tset(ctx, java_packaging_infos = [ctx.attrs.instrumentation_test_listener[JavaPackagingInfo]]) + 
.project_as_args("full_jar_args", ordering = "bfs"), + ]) + + shared_library_info = merge_shared_libraries( + ctx.actions, + deps = [ctx.attrs.instrumentation_test_listener[SharedLibraryInfo]], + ) + + cxx_library_symlink_tree = create_shlib_symlink_tree( + actions = ctx.actions, + out = "cxx_library_symlink_tree", + shared_libs = traverse_shared_library_info(shared_library_info), + ) + + env["BUCK_LD_SYMLINK_TREE"] = cxx_library_symlink_tree + classpath_args.add(cmd_args(extra_classpath + classpath, delimiter = get_path_separator_for_exec_os(ctx))) + cmd.append(at_argfile(actions = ctx.actions, name = "classpath_args_file", args = classpath_args)) cmd.append(android_toolchain.instrumentation_test_runner_main_class) @@ -42,6 +71,17 @@ def android_instrumentation_test_impl(ctx: AnalysisContext): instrumentation_apk_info = ctx.attrs.apk.get(AndroidInstrumentationApkInfo) if instrumentation_apk_info != None: cmd.extend(["--apk-under-test-path", instrumentation_apk_info.apk_under_test]) + if ctx.attrs.is_self_instrumenting: + cmd.extend(["--is-self-instrumenting"]) + extra_instrumentation_args = ctx.attrs.extra_instrumentation_args + if extra_instrumentation_args: + for arg_name, arg_value in extra_instrumentation_args.items(): + cmd.extend( + [ + "--extra-instrumentation-argument", + cmd_args([arg_name, arg_value], delimiter = "="), + ], + ) target_package_file = ctx.actions.declare_output("target_package_file") package_file = ctx.actions.declare_output("package_file") @@ -72,6 +112,25 @@ def android_instrumentation_test_impl(ctx: AnalysisContext): if ctx.attrs.instrumentation_test_listener_class != None: cmd.extend(["--extra-instrumentation-test-listener", ctx.attrs.instrumentation_test_listener_class]) + if ctx.attrs.clear_package_data: + cmd.append("--clear-package-data") + + if ctx.attrs.disable_animations: + cmd.append("--disable-animations") + + if ctx.attrs.collect_tombstones: + cmd.append("--collect-tombstones") + if ctx.attrs.record_video: + cmd.append("--record-video") + if ctx.attrs.log_extractors: + for arg_name, arg_value in ctx.attrs.log_extractors.items(): + cmd.extend( + [ + "--log-extractor", + cmd_args([arg_name, arg_value], delimiter = "="), + ], + ) + cmd.extend( [ "--adb-executable-path", @@ -81,49 +140,84 @@ def android_instrumentation_test_impl(ctx: AnalysisContext): ], ) - remote_execution_properties = { - "platform": _compute_emulator_platform(ctx.attrs.labels or []), - "subplatform": _compute_emulator_subplatform(ctx.attrs.labels or []), - } - re_emulator_abi = _compute_emulator_abi(ctx.attrs.labels or []) - if re_emulator_abi != None: - remote_execution_properties["abi"] = re_emulator_abi - test_info = ExternalRunnerTestInfo( type = "android_instrumentation", command = cmd, - env = ctx.attrs.env, + env = env, labels = ctx.attrs.labels, contacts = ctx.attrs.contacts, run_from_project_root = True, use_project_relative_paths = True, - executor_overrides = { - "android-emulator": CommandExecutorConfig( - local_enabled = android_toolchain.instrumentation_test_can_run_locally, - remote_enabled = True, - remote_execution_properties = remote_execution_properties, - remote_execution_use_case = _compute_re_use_case(ctx.attrs.labels or []), - ), - "static-listing": CommandExecutorConfig( - local_enabled = True, - remote_enabled = True, - remote_execution_properties = { - "platform": "linux-remote-execution", - }, - remote_execution_use_case = "buck2-default", - ), - }, + executor_overrides = _compute_executor_overrides(ctx, 
android_toolchain.instrumentation_test_can_run_locally), local_resources = { - "android_emulator": None, + "android_emulator": None if ctx.attrs._android_emulators == None else ctx.attrs._android_emulators.label, }, + required_local_resources = [RequiredTestLocalResource("android_emulator", listing = True, execution = True)], ) classmap_source_info = [ctx.attrs.apk[JavaClassToSourceMapInfo]] if JavaClassToSourceMapInfo in ctx.attrs.apk else [] - return inject_test_run_info(ctx, test_info) + [ + test_info, run_info = inject_test_run_info(ctx, test_info) + + # We append additional args so that "buck2 run" will work with sane defaults + run_info.args.add(cmd_args(["--auto-run-on-connected-device", "--output", ".", "--adb-executable-path", "adb"])) + return [ + test_info, + run_info, DefaultInfo(), ] + classmap_source_info +def _compute_executor_overrides(ctx: AnalysisContext, instrumentation_test_can_run_locally: bool) -> dict[str, CommandExecutorConfig]: + remote_execution_properties = { + "platform": _compute_emulator_platform(ctx.attrs.labels or []), + "subplatform": _compute_emulator_subplatform(ctx.attrs.labels or []), + } + + re_emulator_abi = _compute_emulator_abi(ctx.attrs.labels or []) + if re_emulator_abi != None: + remote_execution_properties["abi"] = re_emulator_abi + + default_executor_override = CommandExecutorConfig( + local_enabled = instrumentation_test_can_run_locally, + remote_enabled = True, + remote_execution_properties = remote_execution_properties, + remote_execution_use_case = _compute_re_use_case(ctx.attrs.labels or []), + ) + dynamic_listing_executor_override = default_executor_override + test_execution_executor_override = default_executor_override + + if ctx.attrs.re_caps and ctx.attrs.re_use_case: + if "dynamic-listing" in ctx.attrs.re_caps and "dynamic-listing" in ctx.attrs.re_use_case: + _validate_executor_override_re_config(ctx.attrs.re_caps["dynamic-listing"], ctx.attrs.re_use_case["dynamic-listing"]) + dynamic_listing_executor_override = CommandExecutorConfig( + local_enabled = instrumentation_test_can_run_locally, + remote_enabled = True, + remote_execution_properties = ctx.attrs.re_caps["dynamic-listing"], + remote_execution_use_case = ctx.attrs.re_use_case["dynamic-listing"], + ) + if "test-execution" in ctx.attrs.re_caps and "test-execution" in ctx.attrs.re_use_case: + _validate_executor_override_re_config(ctx.attrs.re_caps["test-execution"], ctx.attrs.re_use_case["test-execution"]) + test_execution_executor_override = CommandExecutorConfig( + local_enabled = instrumentation_test_can_run_locally, + remote_enabled = True, + remote_execution_properties = ctx.attrs.re_caps["test-execution"], + remote_execution_use_case = ctx.attrs.re_use_case["test-execution"], + ) + + return { + "android-emulator": default_executor_override, + "dynamic-listing": dynamic_listing_executor_override, + "static-listing": CommandExecutorConfig( + local_enabled = True, + remote_enabled = True, + remote_execution_properties = { + "platform": "linux-remote-execution", + }, + remote_execution_use_case = "buck2-default", + ), + "test-execution": test_execution_executor_override, + } + def _compute_emulator_abi(labels: list[str]): emulator_abi_labels = [label for label in labels if label.startswith(ANDROID_EMULATOR_ABI_LABEL_PREFIX)] expect(len(emulator_abi_labels) <= 1, "multiple '{}' labels were found:[{}], there must be only one!".format(ANDROID_EMULATOR_ABI_LABEL_PREFIX, ", ".join(emulator_abi_labels))) @@ -156,3 +250,10 @@ def _compute_re_use_case(labels: list[str]) -> str: return 
DEFAULT_ANDROID_INSTRUMENTATION_TESTS_USE_CASE else: # len(re_use_case_labels) == 1: return re_use_case_labels[0].replace("re_opts_use_case=", "") + +def _validate_executor_override_re_config(re_caps: dict[str, str], re_use_case: str): + expect(re_use_case in SUPPORTED_USE_CASES, "Unexpected {} use case found, value is expected to be one of the following: {}", re_use_case, ", ".join(SUPPORTED_USE_CASES)) + if "pool" in re_caps: + expect(re_caps["pool"] in SUPPORTED_POOLS, "Unexpected {} pool found, value is expected to be one of the following: {}", re_caps["pool"], ", ".join(SUPPORTED_POOLS)) + if "platform" in re_caps: + expect(re_caps["platform"] in SUPPORTED_PLATFORMS, "Unexpected {} platform found, value is expected to be one of the following: {}", re_caps["platform"], ", ".join(SUPPORTED_PLATFORMS)) diff --git a/prelude/android/android_library.bzl b/prelude/android/android_library.bzl index 5cd5014990e..40f562904a5 100644 --- a/prelude/android/android_library.bzl +++ b/prelude/android/android_library.bzl @@ -5,6 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +load("@prelude//:validation_deps.bzl", "get_validation_deps_outputs") load( "@prelude//android:android_providers.bzl", "AndroidLibraryIntellijInfo", @@ -39,7 +40,10 @@ def android_library_impl(ctx: AnalysisContext) -> list[Provider]: }), ] - java_providers, android_library_intellij_info = build_android_library(ctx) + java_providers, android_library_intellij_info = build_android_library( + ctx = ctx, + validation_deps_outputs = get_validation_deps_outputs(ctx), + ) android_providers = [android_library_intellij_info] if android_library_intellij_info else [] return to_list(java_providers) + [ @@ -52,12 +56,17 @@ def android_library_impl(ctx: AnalysisContext) -> list[Provider]: merge_exported_android_resource_info(ctx.attrs.exported_deps), ] + android_providers +def optional_jars(ctx: AnalysisContext) -> list[Artifact]: + return ctx.attrs.android_optional_jars or [] + def build_android_library( ctx: AnalysisContext, - r_dot_java: [Artifact, None] = None, - extra_sub_targets = {}) -> (JavaProviders, [AndroidLibraryIntellijInfo, None]): - bootclasspath_entries = [] + ctx.attrs._android_toolchain[AndroidToolchainInfo].android_bootclasspath - additional_classpath_entries = [] + r_dot_java: Artifact | None = None, + extra_sub_targets = {}, + validation_deps_outputs: [list[Artifact], None] = None, + classpath_entries: [list[Artifact], None] = None) -> (JavaProviders, [AndroidLibraryIntellijInfo, None]): + bootclasspath_entries = [] + ctx.attrs._android_toolchain[AndroidToolchainInfo].android_bootclasspath + optional_jars(ctx) + additional_classpath_entries = list(classpath_entries) if classpath_entries != None else [] dummy_r_dot_java, android_library_intellij_info = _get_dummy_r_dot_java(ctx) extra_sub_targets = dict(extra_sub_targets) @@ -74,6 +83,7 @@ def build_android_library( additional_classpath_entries = additional_classpath_entries, bootclasspath_entries = bootclasspath_entries, extra_sub_targets = extra_sub_targets, + validation_deps_outputs = validation_deps_outputs, ), android_library_intellij_info else: return build_java_library( @@ -82,10 +92,11 @@ def build_android_library( additional_classpath_entries = additional_classpath_entries, bootclasspath_entries = bootclasspath_entries, extra_sub_targets = extra_sub_targets, + validation_deps_outputs = validation_deps_outputs, ), android_library_intellij_info def _get_dummy_r_dot_java( - ctx: AnalysisContext) -> ([Artifact, 
None], [AndroidLibraryIntellijInfo, None]): + ctx: AnalysisContext) -> (Artifact | None, [AndroidLibraryIntellijInfo, None]): android_resources = dedupe([resource for resource in filter(None, [ x.get(AndroidResourceInfo) for x in ctx.attrs.deps + ctx.attrs.provided_deps + (getattr(ctx.attrs, "provided_deps_query", []) or []) diff --git a/prelude/android/android_manifest.bzl b/prelude/android/android_manifest.bzl index 2f1d889f8ba..af7e06791d8 100644 --- a/prelude/android/android_manifest.bzl +++ b/prelude/android/android_manifest.bzl @@ -13,6 +13,7 @@ load( ) load("@prelude//android:android_toolchain.bzl", "AndroidToolchainInfo") load("@prelude//android:voltron.bzl", "ROOT_MODULE") +load("@prelude//utils:argfile.bzl", "argfile") def android_manifest_impl(ctx: AnalysisContext) -> list[Provider]: output, merge_report = generate_android_manifest( @@ -49,10 +50,9 @@ def generate_android_manifest( elif type(manifests) == "transitive_set": manifests = manifests.project_as_args("artifacts", ordering = "bfs") - library_manifest_paths_file = ctx.actions.write("{}/library_manifest_paths_file".format(module_name), manifests) + library_manifest_paths_file = argfile(actions = ctx.actions, name = "{}/library_manifest_paths_file".format(module_name), args = manifests) generate_manifest_cmd.add(["--library-manifests-list", library_manifest_paths_file]) - generate_manifest_cmd.hidden(manifests) placeholder_entries_args = cmd_args() for key, val in placeholder_entries.items(): diff --git a/prelude/android/android_prebuilt_aar.bzl b/prelude/android/android_prebuilt_aar.bzl index f0fb0f0e642..0392c71fb41 100644 --- a/prelude/android/android_prebuilt_aar.bzl +++ b/prelude/android/android_prebuilt_aar.bzl @@ -13,6 +13,7 @@ load( "JavaClasspathEntry", "create_abi", "create_java_library_providers", + "generate_java_classpath_snapshot", ) load("@prelude//java:java_toolchain.bzl", "JavaToolchainInfo") @@ -25,6 +26,7 @@ def android_prebuilt_aar_impl(ctx: AnalysisContext) -> list[Provider]: jni = ctx.actions.declare_output("jni", dir = True) annotation_jars_dir = ctx.actions.declare_output("annotation_jars", dir = True) proguard_config = ctx.actions.declare_output("proguard.txt") + lint_jar = ctx.actions.declare_output("lint.jar") android_toolchain = ctx.attrs._android_toolchain[AndroidToolchainInfo] unpack_aar_tool = android_toolchain.unpack_aar[RunInfo] @@ -53,6 +55,8 @@ def android_prebuilt_aar_impl(ctx: AnalysisContext) -> list[Provider]: proguard_config.as_output(), "--jar-builder-tool", jar_builder_tool, + "--lint-jar-path", + lint_jar.as_output(), ] ctx.actions.run(unpack_aar_cmd, category = "android_unpack_aar") @@ -70,23 +74,28 @@ def android_prebuilt_aar_impl(ctx: AnalysisContext) -> list[Provider]: ) abi = None if java_toolchain.is_bootstrap_toolchain else create_abi(ctx.actions, java_toolchain.class_abi_generator, all_classes_jar) + abi_jar_snapshot = generate_java_classpath_snapshot(ctx.actions, java_toolchain.cp_snapshot_generator, abi or all_classes_jar, "") library_output_classpath_entry = JavaClasspathEntry( full_library = all_classes_jar, abi = abi or all_classes_jar, abi_as_dir = None, required_for_source_only_abi = ctx.attrs.required_for_source_only_abi, + abi_jar_snapshot = abi_jar_snapshot, ) - java_library_info, java_packaging_info, shared_library_info, linkable_graph, cxx_resource_info, template_placeholder_info, java_library_intellij_info = create_java_library_providers( + java_library_info, java_packaging_info, global_code_info, shared_library_info, linkable_graph, cxx_resource_info, 
template_placeholder_info, java_library_intellij_info = create_java_library_providers( ctx = ctx, library_output = library_output_classpath_entry, + global_code_config = java_toolchain.global_code_config, exported_deps = ctx.attrs.deps, provided_deps = ctx.attrs.desugar_deps, needs_desugar = True, is_prebuilt_jar = True, annotation_jars_dir = annotation_jars_dir, proguard_config = proguard_config, + lint_jar = lint_jar, + sources_jar = ctx.attrs.source_jar, ) native_library = PrebuiltNativeLibraryDir( @@ -99,6 +108,7 @@ def android_prebuilt_aar_impl(ctx: AnalysisContext) -> list[Provider]: return [ java_library_info, java_packaging_info, + global_code_info, shared_library_info, cxx_resource_info, linkable_graph, diff --git a/prelude/android/android_providers.bzl b/prelude/android/android_providers.bzl index 774bb24dffa..b47bb853b27 100644 --- a/prelude/android/android_providers.bzl +++ b/prelude/android/android_providers.bzl @@ -34,8 +34,8 @@ ExopackageNativeInfo = record( ) ExopackageResourcesInfo = record( - assets = [Artifact, None], - assets_hash = [Artifact, None], + assets = Artifact | None, + assets_hash = Artifact | None, res = Artifact, res_hash = Artifact, ) @@ -51,11 +51,12 @@ AndroidBinaryNativeLibsInfo = record( # Indicates which shared lib producing targets are included in the binary. Used by instrumentation tests # to exclude those from the test apk. shared_libraries = list[TargetLabel], - exopackage_info = ["ExopackageNativeInfo", None], + exopackage_info = [ExopackageNativeInfo, None], root_module_native_lib_assets = list[Artifact], non_root_module_native_lib_assets = list[Artifact], native_libs_for_primary_apk = list[Artifact], generated_java_code = list[JavaLibraryInfo], + unstripped_shared_libraries = [Artifact, None], ) AndroidBinaryResourcesInfo = record( @@ -66,9 +67,9 @@ AndroidBinaryResourcesInfo = record( # per-module manifests (packaged as assets) module_manifests = list[Artifact], # per-module assets APKs (for .aabs only) - module_assets = [Artifact, None], + module_assets = Artifact | None, # zip containing any strings packaged as assets - packaged_string_assets = [Artifact, None], + packaged_string_assets = Artifact | None, # "APK" containing resources to be used by the Android binary primary_resources_apk = Artifact, # proguard config needed to retain used resources @@ -76,13 +77,13 @@ AndroidBinaryResourcesInfo = record( # R.java jars containing all the linked resources r_dot_java_infos = list[RDotJavaInfo], # directory containing filtered string resources files - string_source_map = [Artifact, None], + string_source_map = Artifact | None, # directory containing filtered string resources files for Voltron language packs - voltron_string_source_map = [Artifact, None], + voltron_string_source_map = Artifact | None, # list of jars that could contain resources that should be packaged into the APK jar_files_that_may_contain_resources = list[Artifact], # The resource infos that are used in this APK - unfiltered_resource_infos = list["AndroidResourceInfo"], + unfiltered_resource_infos = list, # list[AndroidResourceInfo] ) # Information about an `android_build_config` @@ -114,6 +115,7 @@ AndroidApkInfo = provider( "apk": provider_field(typing.Any, default = None), "manifest": provider_field(typing.Any, default = None), "materialized_artifacts": provider_field(typing.Any, default = None), + "unstripped_shared_libraries": provider_field(typing.Any, default = None), # artifact }, ) @@ -219,11 +221,11 @@ ExportedAndroidResourceInfo = provider( DexFilesInfo = record( 
primary_dex = Artifact, - primary_dex_class_names = [Artifact, None], + primary_dex_class_names = Artifact | None, root_module_secondary_dex_dirs = list[Artifact], non_root_module_secondary_dex_dirs = list[Artifact], secondary_dex_exopackage_info = [ExopackageDexInfo, None], - proguard_text_files_path = [Artifact, None], + proguard_text_files_path = Artifact | None, ) ExopackageInfo = record( @@ -246,7 +248,7 @@ def merge_android_packageable_info( actions: AnalysisActions, deps: list[Dependency], build_config_info: [AndroidBuildConfigInfo, None] = None, - manifest: [Artifact, None] = None, + manifest: Artifact | None = None, prebuilt_native_library_dir: [PrebuiltNativeLibraryDir, None] = None, resource_info: [AndroidResourceInfo, None] = None, for_primary_apk: bool = False) -> AndroidPackageableInfo: diff --git a/prelude/android/android_resource.bzl b/prelude/android/android_resource.bzl index e51a21cfe95..e544acf8cce 100644 --- a/prelude/android/android_resource.bzl +++ b/prelude/android/android_resource.bzl @@ -5,14 +5,16 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -load("@prelude//java:java_providers.bzl", "get_java_packaging_info") +load("@prelude//java:java_providers.bzl", "derive_compiling_deps", "get_global_code_info", "get_java_packaging_info") +load("@prelude//java:java_toolchain.bzl", "JavaToolchainInfo") +load("@prelude//utils:argfile.bzl", "argfile") load("@prelude//utils:expect.bzl", "expect") load(":android_providers.bzl", "AndroidResourceInfo", "ExportedAndroidResourceInfo", "RESOURCE_PRIORITY_NORMAL", "merge_android_packageable_info") load(":android_toolchain.bzl", "AndroidToolchainInfo") JAVA_PACKAGE_FILENAME = "java_package.txt" -def _convert_to_artifact_dir(ctx: AnalysisContext, attr: [Dependency, dict, Artifact, None], attr_name: str) -> [Artifact, None]: +def _convert_to_artifact_dir(ctx: AnalysisContext, attr: [Dependency, dict, Artifact, None], attr_name: str) -> Artifact | None: if isinstance(attr, Dependency): expect(len(attr[DefaultInfo].default_outputs) == 1, "Expect one default output from build dep of attr {}!".format(attr_name)) return attr[DefaultInfo].default_outputs[0] @@ -73,6 +75,8 @@ def android_resource_impl(ctx: AnalysisContext) -> list[Provider]: providers.append(merge_android_packageable_info(ctx.label, ctx.actions, ctx.attrs.deps, manifest = ctx.attrs.manifest, resource_info = resource_info)) providers.append(get_java_packaging_info(ctx, ctx.attrs.deps)) providers.append(DefaultInfo(default_output = default_output, sub_targets = sub_targets)) + compiling_deps = derive_compiling_deps(ctx.actions, None, ctx.attrs.deps) + providers.append(get_global_code_info(ctx, ctx.attrs.deps, ctx.attrs.deps, derive_compiling_deps(ctx.actions, None, []), compiling_deps, compiling_deps, ctx.attrs._java_toolchain[JavaToolchainInfo].global_code_config)) return providers @@ -82,20 +86,20 @@ def aapt2_compile( android_toolchain: AndroidToolchainInfo, skip_crunch_pngs: bool = False, identifier: [str, None] = None) -> Artifact: - aapt2_command = cmd_args(android_toolchain.aapt2) - aapt2_command.add("compile") - aapt2_command.add("--legacy") + aapt2_command = [cmd_args(android_toolchain.aapt2)] + aapt2_command.append("compile") + aapt2_command.append("--legacy") if skip_crunch_pngs: - aapt2_command.add("--no-crunch") - aapt2_command.add(["--dir", resources_dir]) + aapt2_command.append("--no-crunch") + aapt2_command.extend(["--dir", resources_dir]) aapt2_output = 
ctx.actions.declare_output("{}_resources.flata".format(identifier) if identifier else "resources.flata") - aapt2_command.add("-o", aapt2_output.as_output()) + aapt2_command.extend(["-o", aapt2_output.as_output()]) - ctx.actions.run(aapt2_command, category = "aapt2_compile", identifier = identifier) + ctx.actions.run(cmd_args(aapt2_command), category = "aapt2_compile", identifier = identifier) return aapt2_output -def _get_package(ctx: AnalysisContext, package: [str, None], manifest: [Artifact, None]) -> Artifact: +def _get_package(ctx: AnalysisContext, package: [str, None], manifest: Artifact | None) -> Artifact: if package: return ctx.actions.write(JAVA_PACKAGE_FILENAME, package) else: @@ -104,9 +108,13 @@ def _get_package(ctx: AnalysisContext, package: [str, None], manifest: [Artifact def extract_package_from_manifest(ctx: AnalysisContext, manifest: Artifact) -> Artifact: r_dot_java_package = ctx.actions.declare_output(JAVA_PACKAGE_FILENAME) - extract_package_cmd = cmd_args(ctx.attrs._android_toolchain[AndroidToolchainInfo].manifest_utils[RunInfo]) - extract_package_cmd.add(["--manifest-path", manifest]) - extract_package_cmd.add(["--package-output", r_dot_java_package.as_output()]) + extract_package_cmd = cmd_args( + ctx.attrs._android_toolchain[AndroidToolchainInfo].manifest_utils[RunInfo], + "--manifest-path", + manifest, + "--package-output", + r_dot_java_package.as_output(), + ) ctx.actions.run(extract_package_cmd, category = "android_extract_package") @@ -125,10 +133,9 @@ def get_text_symbols( dep_symbols = _get_dep_symbols(deps) dep_symbol_paths.add(dep_symbols) - dep_symbol_paths_file, _ = ctx.actions.write("{}_dep_symbol_paths_file".format(identifier) if identifier else "dep_symbol_paths_file", dep_symbol_paths, allow_args = True) + dep_symbol_paths_file = argfile(actions = ctx.actions, name = "{}_dep_symbol_paths_file".format(identifier) if identifier else "dep_symbol_paths_file", args = dep_symbol_paths, allow_args = True) mini_aapt_cmd.add(["--dep-symbol-paths", dep_symbol_paths_file]) - mini_aapt_cmd.hidden(dep_symbols) text_symbols = ctx.actions.declare_output("{}_R.txt".format(identifier) if identifier else "R.txt") mini_aapt_cmd.add(["--output-path", text_symbols.as_output()]) diff --git a/prelude/android/android_toolchain.bzl b/prelude/android/android_toolchain.bzl index 89059e948be..7142a94cd54 100644 --- a/prelude/android/android_toolchain.bzl +++ b/prelude/android/android_toolchain.bzl @@ -19,14 +19,18 @@ AndroidToolchainInfo = provider( "aidl": provider_field(typing.Any, default = None), "android_jar": provider_field(typing.Any, default = None), "android_bootclasspath": provider_field(typing.Any, default = None), + "android_optional_jars": provider_field(typing.Any, default = None), "apk_builder": provider_field(typing.Any, default = None), "apk_module_graph": provider_field(typing.Any, default = None), "app_without_resources_stub": provider_field(typing.Any, default = None), + "bundle_apks_builder": provider_field(typing.Any, default = None), "bundle_builder": provider_field(typing.Any, default = None), "combine_native_library_dirs": provider_field(typing.Any, default = None), - "compress_libraries": provider_field(typing.Any, default = None), + "cross_module_native_deps_check": provider_field( + typing.Any, + default = None, + ), "d8_command": provider_field(typing.Any, default = None), - "enabled_voltron_non_asset_libs": provider_field(typing.Any, default = None), "exo_resources_rewriter": provider_field(typing.Any, default = None), "exopackage_agent_apk": 
provider_field(typing.Any, default = None), "filter_dex_class_names": provider_field(typing.Any, default = None), @@ -50,6 +54,7 @@ AndroidToolchainInfo = provider( "mini_aapt": provider_field(typing.Any, default = None), "native_libs_as_assets_metadata": provider_field(typing.Any, default = None), "optimized_proguard_config": provider_field(typing.Any, default = None), + "p7zip": provider_field(typing.Any, default = None), "package_meta_inf_version_files": provider_field(typing.Any, default = None), "package_strings_as_assets": provider_field(typing.Any, default = None), "prebuilt_aar_resources_have_low_priority": provider_field(typing.Any, default = None), diff --git a/prelude/android/apk_genrule.bzl b/prelude/android/apk_genrule.bzl index 0c50c05d859..adcefa21fc6 100644 --- a/prelude/android/apk_genrule.bzl +++ b/prelude/android/apk_genrule.bzl @@ -8,12 +8,18 @@ load("@prelude//:genrule.bzl", "process_genrule") load("@prelude//android:android_apk.bzl", "get_install_info") load("@prelude//android:android_providers.bzl", "AndroidAabInfo", "AndroidApkInfo", "AndroidApkUnderTestInfo") +load("@prelude//android:android_toolchain.bzl", "AndroidToolchainInfo") +load("@prelude//android:bundletool_util.bzl", "derive_universal_apk") +load("@prelude//java:java_providers.bzl", "KeystoreInfo") load("@prelude//utils:expect.bzl", "expect") load("@prelude//java/class_to_srcs.bzl", "JavaClassToSourceMapInfo") def apk_genrule_impl(ctx: AnalysisContext) -> list[Provider]: expect((ctx.attrs.apk == None) != (ctx.attrs.aab == None), "Exactly one of 'apk' and 'aab' must be specified") + input_android_apk_under_test_info = None + input_unstripped_shared_libraries = None + input_android_apk_subtargets = None if ctx.attrs.apk != None: # TODO(T104150125) The underlying APK should not have exopackage enabled input_android_apk_info = ctx.attrs.apk[AndroidApkInfo] @@ -21,7 +27,13 @@ def apk_genrule_impl(ctx: AnalysisContext) -> list[Provider]: input_apk = input_android_apk_info.apk input_manifest = input_android_apk_info.manifest input_materialized_artifacts = input_android_apk_info.materialized_artifacts + input_unstripped_shared_libraries = input_android_apk_info.unstripped_shared_libraries input_android_apk_under_test_info = ctx.attrs.apk[AndroidApkUnderTestInfo] + input_android_apk_subtargets = ctx.attrs.apk[DefaultInfo].sub_targets + + env_vars = { + "APK": cmd_args(input_apk), + } else: input_android_aab_info = ctx.attrs.aab[AndroidAabInfo] expect(input_android_aab_info != None, "'aab' attribute must be an Android Bundle!") @@ -31,35 +43,103 @@ def apk_genrule_impl(ctx: AnalysisContext) -> list[Provider]: input_manifest = input_android_aab_info.manifest input_materialized_artifacts = input_android_aab_info.materialized_artifacts - env_vars = { - "APK": cmd_args(input_apk), - } + env_vars = { + "AAB": cmd_args(input_apk), + } - # Like buck1, we ignore the 'out' attribute and construct the output path ourselves. 
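The rewritten implementation below drops that behavior: it passes the genrule's declared `out`/`outs` through to `process_genrule` and dispatches on the extension of the resulting default output. A usage sketch may help; it is a minimal illustration, not taken from the diff — `bash`, `outs`, `default_outs`, and the target names are hypothetical generic-genrule pieces, while `aab`, `use_derived_apk`, and `keystore` are attributes the new code actually reads.

    # Hypothetical apk_genrule that rewrites an .aab; with use_derived_apk the
    # rule then derives (and, given a keystore, signs) a universal APK from it.
    apk_genrule(
        name = "patched_bundle",
        aab = ":my_app_bundle",
        # $AAB is provided by the env_vars set up below for aab inputs.
        bash = 'cp "$AAB" "$OUT/patched.aab"',
        outs = {"patched.aab": ["patched.aab"]},
        default_outs = ["patched.aab"],
        use_derived_apk = True,
        keystore = ":debug_keystore",
    )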
- output_apk_name = "{}.apk".format(ctx.label.name) + genrule_providers = process_genrule(ctx, ctx.attrs.out, ctx.attrs.outs, env_vars, other_outputs = input_materialized_artifacts) - genrule_providers = process_genrule(ctx, output_apk_name, None, env_vars, other_outputs = input_materialized_artifacts) + genrule_default_info = filter(lambda x: isinstance(x, DefaultInfo), genrule_providers) expect( - len(genrule_providers) == 1 and isinstance(genrule_providers[0], DefaultInfo), - "Expecting just a single DefaultInfo, but got {}".format(genrule_providers), + len(genrule_default_info) == 1, + "Expecting a single DefaultInfo, but got {}", + genrule_default_info, ) - output_apk = genrule_providers[0].default_outputs[0] - class_to_src_map = [ctx.attrs.apk[JavaClassToSourceMapInfo]] if (ctx.attrs.apk and JavaClassToSourceMapInfo in ctx.attrs.apk) else [] + genrule_default_output = genrule_default_info[0].default_outputs[0] + genrule_default_output_is_aab = genrule_default_output.extension == ".aab" + genrule_default_output_is_apk = genrule_default_output.extension == ".apk" - install_info = get_install_info( - ctx, - output_apk = output_apk, - manifest = input_manifest, - exopackage_info = None, + expect( + genrule_default_output_is_aab or genrule_default_output_is_apk, + "apk_genrule must output a '.apk' or '.aab' file, but got {}", + genrule_default_info, ) - return genrule_providers + [ - AndroidApkInfo( - apk = output_apk, - manifest = input_manifest, - materialized_artifacts = input_materialized_artifacts, - ), - install_info, - ] + filter(None, [input_android_apk_under_test_info]) + class_to_src_map + if ctx.attrs.aab: + if genrule_default_output_is_aab: + output_aab_info = AndroidAabInfo( + aab = genrule_default_output, + manifest = input_manifest, + materialized_artifacts = input_materialized_artifacts, + ) + output_apk = None + else: + output_aab_info = None + output_apk = genrule_default_output + + if ctx.attrs.use_derived_apk: + expect(genrule_default_output_is_aab, "Default genrule output must end in '.aab' if use_derived_apk is True.") + + output_apk = derive_universal_apk( + ctx = ctx, + android_toolchain = ctx.attrs._android_toolchain[AndroidToolchainInfo], + app_bundle = genrule_default_output, + keystore = ctx.attrs.keystore[KeystoreInfo] if ctx.attrs.keystore else None, + ) + default_providers = [ + DefaultInfo( + default_output = output_apk, + other_outputs = input_materialized_artifacts + genrule_default_info[0].other_outputs, + sub_targets = { + "aab": [DefaultInfo( + default_outputs = [genrule_default_output], + )], + }, + ), + ] + filter(lambda x: not isinstance(x, DefaultInfo), genrule_providers) + else: + default_providers = genrule_providers + + else: + sub_targets = {k: [v[DefaultInfo]] for k, v in genrule_default_info[0].sub_targets.items()} + sub_targets.update({ + "unstripped_native_libraries": [input_android_apk_subtargets["unstripped_native_libraries"][DefaultInfo]], + "unstripped_native_libraries_json": [input_android_apk_subtargets["unstripped_native_libraries_json"][DefaultInfo]], + }) + expect(genrule_default_output_is_apk, "apk_genrule output must end in '.apk'") + output_apk = genrule_default_output + output_aab_info = None + default_providers = [ + DefaultInfo( + default_output = output_apk, + other_outputs = genrule_default_info[0].other_outputs, + sub_targets = sub_targets, + ), + ] + filter(lambda x: not isinstance(x, DefaultInfo), genrule_providers) + + class_to_src_map = [ctx.attrs.apk[JavaClassToSourceMapInfo]] if (ctx.attrs.apk and 
JavaClassToSourceMapInfo in ctx.attrs.apk) else [] + + if output_apk: + apk_providers = [ + AndroidApkInfo( + apk = output_apk, + manifest = input_manifest, + materialized_artifacts = input_materialized_artifacts, + unstripped_shared_libraries = input_unstripped_shared_libraries, + ), + get_install_info( + ctx, + output_apk = output_apk, + manifest = input_manifest, + exopackage_info = None, + ), + ] + else: + apk_providers = [] + + aab_providers = filter(None, [output_aab_info]) + apk_under_test_providers = filter(None, [input_android_apk_under_test_info]) + + return default_providers + apk_providers + aab_providers + apk_under_test_providers + class_to_src_map diff --git a/prelude/android/bundletool_util.bzl b/prelude/android/bundletool_util.bzl new file mode 100644 index 00000000000..9105e1ccbe1 --- /dev/null +++ b/prelude/android/bundletool_util.bzl @@ -0,0 +1,42 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load("@prelude//android:android_toolchain.bzl", "AndroidToolchainInfo") +load("@prelude//java:java_providers.bzl", "KeystoreInfo") # @unused used as type + +def derive_universal_apk( + ctx: AnalysisContext, + android_toolchain: AndroidToolchainInfo, + app_bundle: Artifact, + keystore: [KeystoreInfo, None]) -> Artifact: + output_apk = ctx.actions.declare_output("universal.apk") + + bundle_apks_builder_args = cmd_args([ + android_toolchain.bundle_apks_builder[RunInfo], + "--input-bundle", + app_bundle, + "--p7zip", + android_toolchain.p7zip, + "--aapt2", + android_toolchain.aapt2, + "--zipalign", + android_toolchain.zipalign[RunInfo], + "--output-apk", + output_apk.as_output(), + ]) + + if keystore: + bundle_apks_builder_args.add(cmd_args([ + "--keystore", + keystore.store, + "--keystore-properties", + keystore.properties, + ])) + + ctx.actions.run(bundle_apks_builder_args, category = "bundle_build", identifier = "build_universal_apk") + + return output_apk diff --git a/prelude/android/configuration.bzl b/prelude/android/configuration.bzl index 70ad1ef967e..54b95cc3b97 100644 --- a/prelude/android/configuration.bzl +++ b/prelude/android/configuration.bzl @@ -7,6 +7,7 @@ load("@prelude//android:cpu_filters.bzl", "ALL_CPU_FILTERS", "CPU_FILTER_FOR_DEFAULT_PLATFORM", "CPU_FILTER_FOR_PRIMARY_PLATFORM") load("@prelude//android:min_sdk_version.bzl", "get_min_sdk_version_constraint_value_name", "get_min_sdk_version_range") +load("@prelude//cfg/modifier:name.bzl", "cfg_name") load("@prelude//utils:expect.bzl", "expect") # Android binaries (APKs or AABs) can be built for one or more different platforms. buck2 supports @@ -73,6 +74,13 @@ def _cpu_split_transition( if len(cpu_filters) == 1 and cpu_filters[0] == "default": default = refs.default_platform[PlatformInfo] + + # Use `cfg_name` function from modifier resolution so that we get the same cfg as default cfg + # of android libraries. 
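+ # A condensed sketch of this relabeling, which is applied again for each
+ # per-CPU platform further below (the helper name is hypothetical, for
+ # illustration only):
+ #     def _with_canonical_label(p: PlatformInfo) -> PlatformInfo:
+ #         return PlatformInfo(
+ #             label = cfg_name(p.configuration),
+ #             configuration = p.configuration,
+ #         )
+ # The configuration itself is unchanged; only its label is normalized.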
+ default = PlatformInfo( + label = cfg_name(default.configuration), + configuration = default.configuration, + ) return {CPU_FILTER_FOR_DEFAULT_PLATFORM: default} expect(CPU_FILTER_FOR_PRIMARY_PLATFORM == "arm64") @@ -108,12 +116,16 @@ def _cpu_split_transition( if len(new_configs) > 0: updated_constraints[refs.maybe_build_only_native_code[ConstraintSettingInfo].label] = refs.build_only_native_code[ConstraintValueInfo] + cfg_info = ConfigurationInfo( + constraints = updated_constraints, + values = platform.configuration.values, + ) + + # Use `cfg_name` function from modifier resolution so that we get the same cfg as default cfg + # of android libraries. new_configs[platform_name] = PlatformInfo( - label = platform_name, - configuration = ConfigurationInfo( - constraints = updated_constraints, - values = platform.configuration.values, - ), + label = cfg_name(cfg_info), + configuration = cfg_info, ) return new_configs diff --git a/prelude/android/constraints/BUCK.v2 b/prelude/android/constraints/BUCK.v2 index 4efea2b3e27..a211f28db06 100644 --- a/prelude/android/constraints/BUCK.v2 +++ b/prelude/android/constraints/BUCK.v2 @@ -1,28 +1,35 @@ load("@prelude//android:min_sdk_version.bzl", "get_min_sdk_version_constraint_value_name", "get_min_sdk_version_range") +load("@prelude//utils:source_listing.bzl", "source_listing") -native.constraint_setting( +oncall("build_infra") + +source_listing() + +prelude = native # Avoid warnings and auto-formatters + +prelude.constraint_setting( name = "maybe_build_only_native_code", visibility = ["PUBLIC"], ) -native.constraint_value( +prelude.constraint_value( name = "build_only_native_code", constraint_setting = ":maybe_build_only_native_code", visibility = ["PUBLIC"], ) -native.constraint_setting( +prelude.constraint_setting( name = "maybe_merge_native_libraries", visibility = ["PUBLIC"], ) -native.constraint_value( +prelude.constraint_value( name = "merge_native_libraries", constraint_setting = ":maybe_merge_native_libraries", visibility = ["PUBLIC"], ) -native.filegroup( +prelude.filegroup( name = "files", srcs = glob( ["**"], @@ -30,13 +37,13 @@ native.filegroup( visibility = ["PUBLIC"], ) -native.constraint_setting( +prelude.constraint_setting( name = "min_sdk_version", visibility = ["PUBLIC"], ) [ - native.constraint_value( + prelude.constraint_value( name = get_min_sdk_version_constraint_value_name(min_sdk), constraint_setting = ":min_sdk_version", ) diff --git a/prelude/android/dex_rules.bzl b/prelude/android/dex_rules.bzl index 02184f17445..f1928a4df6b 100644 --- a/prelude/android/dex_rules.bzl +++ b/prelude/android/dex_rules.bzl @@ -11,6 +11,7 @@ load("@prelude//android:voltron.bzl", "ROOT_MODULE", "get_apk_module_graph_info" load("@prelude//java:dex.bzl", "DexLibraryInfo", "get_dex_produced_from_java_library") load("@prelude//java:dex_toolchain.bzl", "DexToolchainInfo") load("@prelude//java:java_library.bzl", "compile_to_jar") +load("@prelude//utils:argfile.bzl", "argfile", "at_argfile") load("@prelude//utils:expect.bzl", "expect") load("@prelude//utils:utils.bzl", "flatten") load("@prelude//paths.bzl", "paths") @@ -102,15 +103,14 @@ def get_single_primary_dex( output_dex_file = ctx.actions.declare_output("classes.dex") d8_cmd.add(["--output-dex-file", output_dex_file.as_output()]) - jar_to_dex_file = ctx.actions.write("jar_to_dex_file.txt", java_library_jars) + jar_to_dex_file = argfile(actions = ctx.actions, name = "jar_to_dex_file.txt", args = java_library_jars) d8_cmd.add(["--files-to-dex-list", jar_to_dex_file]) - 
d8_cmd.hidden(java_library_jars) d8_cmd.add(["--android-jar", android_toolchain.android_jar]) if not is_optimized: d8_cmd.add("--no-optimize") - ctx.actions.run(d8_cmd, category = "d8", identifier = "{}:{}".format(ctx.label.package, ctx.label.name)) + ctx.actions.run(d8_cmd, category = "get_single_primary_dex", identifier = "{}:{}".format(ctx.label.package, ctx.label.name)) return DexFilesInfo( primary_dex = output_dex_file, @@ -126,10 +126,10 @@ def get_multi_dex( android_toolchain: AndroidToolchainInfo, java_library_jars_to_owners: dict[Artifact, TargetLabel], primary_dex_patterns: list[str], - proguard_configuration_output_file: [Artifact, None] = None, - proguard_mapping_output_file: [Artifact, None] = None, + proguard_configuration_output_file: Artifact | None = None, + proguard_mapping_output_file: Artifact | None = None, is_optimized: bool = False, - apk_module_graph_file: [Artifact, None] = None) -> DexFilesInfo: + apk_module_graph_file: Artifact | None = None) -> DexFilesInfo: expect( not _is_exopackage_enabled_for_secondary_dex(ctx), "secondary dex exopackage can only be enabled on pre-dexed builds!", @@ -154,7 +154,7 @@ def get_multi_dex( secondary_dex_dir_srcs = {} all_jars = flatten(module_to_jars.values()) - all_jars_list = ctx.actions.write("all_jars_classpath.txt", all_jars) + all_jars_list = argfile(actions = ctx.actions, name = "all_jars_classpath.txt", args = all_jars) for module, jars in module_to_jars.items(): multi_dex_cmd = cmd_args(android_toolchain.multi_dex_command[RunInfo]) secondary_dex_compression_cmd = cmd_args(android_toolchain.secondary_dex_compression_command[RunInfo]) @@ -175,9 +175,8 @@ def get_multi_dex( android_toolchain, ) - primary_dex_jar_to_dex_file = ctx.actions.write("primary_dex_jars_to_dex_file_for_root_module.txt", primary_dex_jars) + primary_dex_jar_to_dex_file = argfile(actions = ctx.actions, name = "primary_dex_jars_to_dex_file_for_root_module.txt", args = primary_dex_jars) multi_dex_cmd.add("--primary-dex-files-to-dex-list", primary_dex_jar_to_dex_file) - multi_dex_cmd.hidden(primary_dex_jars) multi_dex_cmd.add("--minimize-primary-dex") else: jars_to_dex = jars @@ -194,16 +193,14 @@ def get_multi_dex( secondary_dex_compression_cmd.add("--secondary-dex-output-dir", secondary_dex_dir_for_module.as_output()) jars_to_dex = jars multi_dex_cmd.add("--classpath-files", all_jars_list) - multi_dex_cmd.hidden(all_jars) multi_dex_cmd.add("--module", module) multi_dex_cmd.add("--canary-class-name", apk_module_graph_info.module_to_canary_class_name_function(module)) secondary_dex_compression_cmd.add("--module", module) secondary_dex_compression_cmd.add("--canary-class-name", apk_module_graph_info.module_to_canary_class_name_function(module)) - jar_to_dex_file = ctx.actions.write("jars_to_dex_file_for_module_{}.txt".format(module), jars_to_dex) + jar_to_dex_file = argfile(actions = ctx.actions, name = "jars_to_dex_file_for_module_{}.txt".format(module), args = jars_to_dex) multi_dex_cmd.add("--files-to-dex-list", jar_to_dex_file) - multi_dex_cmd.hidden(jars_to_dex) multi_dex_cmd.add("--android-jar", android_toolchain.android_jar) if not is_optimized: @@ -222,7 +219,7 @@ def get_multi_dex( ctx.actions.symlinked_dir(outputs[secondary_dex_dir], secondary_dex_dir_srcs) - ctx.actions.dynamic_output(dynamic = inputs, inputs = [], outputs = outputs, f = do_multi_dex) + ctx.actions.dynamic_output(dynamic = inputs, inputs = [], outputs = [o.as_output() for o in outputs], f = do_multi_dex) return DexFilesInfo( primary_dex = primary_dex_file, @@ -238,8 +235,8 @@ 
def _get_primary_dex_and_secondary_dex_jars( jars: list[Artifact], java_library_jars_to_owners: dict[Artifact, TargetLabel], primary_dex_patterns_file: Artifact, - proguard_configuration_output_file: [Artifact, None], - proguard_mapping_output_file: [Artifact, None], + proguard_configuration_output_file: Artifact | None, + proguard_mapping_output_file: Artifact | None, android_toolchain: AndroidToolchainInfo) -> (list[Artifact], list[Artifact]): primary_dex_jars = [] secondary_dex_jars = [] @@ -321,7 +318,7 @@ DexInputsWithClassNamesAndWeightEstimatesFile = record( SecondaryDexMetadataConfig = record( secondary_dex_compression = str, secondary_dex_metadata_path = [str, None], - secondary_dex_metadata_file = [Artifact, None], + secondary_dex_metadata_file = Artifact | None, secondary_dex_metadata_line = Artifact, secondary_dex_canary_class_name = str, ) @@ -377,11 +374,14 @@ def _filter_pre_dexed_libs( "--output", weight_estimate_and_filtered_class_names_file.as_output(), ]) - filter_dex_cmd_argsfile = actions.write("filter_dex_cmd_args_{}".format(batch_number), filter_dex_cmd_args) filter_dex_cmd = cmd_args([ android_toolchain.filter_dex_class_names[RunInfo], - cmd_args(filter_dex_cmd_argsfile, format = "@{}").hidden(filter_dex_cmd_args), + at_argfile( + actions = actions, + name = "filter_dex_cmd_args_{}".format(batch_number), + args = filter_dex_cmd_args, + ), ]) actions.run(filter_dex_cmd, category = "filter_dex", identifier = "batch_{}".format(batch_number)) @@ -398,7 +398,7 @@ def merge_to_split_dex( android_toolchain: AndroidToolchainInfo, pre_dexed_libs: list[DexLibraryInfo], split_dex_merge_config: SplitDexMergeConfig, - apk_module_graph_file: [Artifact, None] = None) -> DexFilesInfo: + apk_module_graph_file: Artifact | None = None) -> DexFilesInfo: is_exopackage_enabled_for_secondary_dex = _is_exopackage_enabled_for_secondary_dex(ctx) if is_exopackage_enabled_for_secondary_dex: expect( @@ -554,7 +554,7 @@ def merge_to_split_dex( metadata_lines.append(artifacts[metadata_line_artifact].read_string().strip()) ctx.actions.write(outputs[metadata_dot_txt], metadata_lines) - ctx.actions.dynamic_output(dynamic = flatten(metadata_line_artifacts_by_module.values()), inputs = [], outputs = metadata_dot_txt_files_by_module.values(), f = write_metadata_dot_txts) + ctx.actions.dynamic_output(dynamic = flatten(metadata_line_artifacts_by_module.values()), inputs = [], outputs = [o.as_output() for o in metadata_dot_txt_files_by_module.values()], f = write_metadata_dot_txts) ctx.actions.symlinked_dir( outputs[root_module_secondary_dexes_dir], @@ -565,7 +565,7 @@ def merge_to_split_dex( non_root_module_secondary_dexes_for_symlinking, ) - ctx.actions.dynamic_output(dynamic = input_artifacts, inputs = [], outputs = outputs, f = merge_pre_dexed_libs) + ctx.actions.dynamic_output(dynamic = input_artifacts, inputs = [], outputs = [o.as_output() for o in outputs], f = merge_pre_dexed_libs) if is_exopackage_enabled_for_secondary_dex: root_module_secondary_dex_dirs = [] @@ -592,15 +592,14 @@ def _merge_dexes( output_dex_file: Artifact, pre_dexed_artifacts: list[Artifact], pre_dexed_artifacts_file: Artifact, - class_names_to_include: [Artifact, None] = None, - secondary_output_dex_file: [Artifact, None] = None, + class_names_to_include: Artifact | None = None, + secondary_output_dex_file: Artifact | None = None, secondary_dex_metadata_config: [SecondaryDexMetadataConfig, None] = None): d8_cmd = cmd_args(android_toolchain.d8_command[RunInfo]) d8_cmd.add(["--output-dex-file", output_dex_file.as_output()]) - 
pre_dexed_artifacts_to_dex_file = ctx.actions.write(pre_dexed_artifacts_file.as_output(), pre_dexed_artifacts) + pre_dexed_artifacts_to_dex_file = argfile(actions = ctx.actions, name = pre_dexed_artifacts_file, args = pre_dexed_artifacts) d8_cmd.add(["--files-to-dex-list", pre_dexed_artifacts_to_dex_file]) - d8_cmd.hidden(pre_dexed_artifacts) d8_cmd.add(["--android-jar", android_toolchain.android_jar]) d8_cmd.add(_DEX_MERGE_OPTIONS) @@ -620,7 +619,7 @@ def _merge_dexes( ctx.actions.run( d8_cmd, - category = "d8", + category = "merge_dexes", identifier = "{}:{} {}".format(ctx.label.package, ctx.label.name, output_dex_file.short_path), ) diff --git a/prelude/android/gen_aidl.bzl b/prelude/android/gen_aidl.bzl index 2e5e0297f33..a2b56d27b7f 100644 --- a/prelude/android/gen_aidl.bzl +++ b/prelude/android/gen_aidl.bzl @@ -15,14 +15,14 @@ _AidlSourceInfo = provider(fields = { def gen_aidl_impl(ctx: AnalysisContext) -> list[Provider]: android_toolchain = ctx.attrs._android_toolchain[AndroidToolchainInfo] - aidl_cmd = cmd_args(android_toolchain.aidl) - aidl_cmd.add("-p", android_toolchain.framework_aidl_file) - aidl_cmd.add("-I", ctx.attrs.import_path) - for path in ctx.attrs.import_paths: - aidl_cmd.add("-I", path) - - # We need the `aidl_srcs` files - otherwise the search on the `import_path` won't find anything. - aidl_cmd.hidden(ctx.attrs.aidl_srcs) + aidl_cmd = cmd_args( + [android_toolchain.aidl] + + ["-p", android_toolchain.framework_aidl_file] + + ["-I", ctx.attrs.import_path] + + [a for path in ctx.attrs.import_paths for a in ["-I", path]], + # We need the `aidl_srcs` files - otherwise the search on the `import_path` won't find anything. + hidden = ctx.attrs.aidl_srcs, + ) # Allow gen_aidl rules to depend on other gen_aidl rules, and make the source files from the # deps accessible in this context. This is an alternative to adding dependent files in @@ -35,7 +35,7 @@ def gen_aidl_impl(ctx: AnalysisContext) -> list[Provider]: else: warning("`{}` dependency `{}` is not a `gen_aidl` rule and will be ignored".format(ctx.label, dep.label)) - aidl_cmd.hidden(dep_srcs) + aidl_cmd.add(cmd_args(hidden = dep_srcs)) aidl_out = ctx.actions.declare_output("aidl_output", dir = True) aidl_cmd.add("-o", aidl_out.as_output()) diff --git a/prelude/android/preprocess_java_classes.bzl b/prelude/android/preprocess_java_classes.bzl index 5a94c0a800e..4f4e8cc7059 100644 --- a/prelude/android/preprocess_java_classes.bzl +++ b/prelude/android/preprocess_java_classes.bzl @@ -6,34 +6,42 @@ # of this source tree. 
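The hunks that follow rework `get_preprocessed_java_classes` to take the binary's `EnhancementContext` (so it can register debug sub-outputs) and to hand the preprocessing script a JSON map from jar name to owning target. A hedged call-site sketch, assuming `enhance_ctx` and `input_jars_to_owners` come from the android_binary enhancement flow (both names are illustrative):

    # Sketch: returns the processed jars keyed by owning target, plus an
    # optional directory of artifacts materialized for the script (or None).
    output_jars_to_owners, materialized_dir = get_preprocessed_java_classes(
        enhance_ctx,
        input_jars = input_jars_to_owners,  # dict[Artifact, TargetLabel]
    )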
load("@prelude//android:android_toolchain.bzl", "AndroidToolchainInfo") +load("@prelude//android:util.bzl", "EnhancementContext") load("@prelude//java:java_toolchain.bzl", "JavaToolchainInfo") load("@prelude//java/utils:java_more_utils.bzl", "get_path_separator_for_exec_os") load("@prelude//utils:expect.bzl", "expect") -def get_preprocessed_java_classes(ctx: AnalysisContext, input_jars = {"artifact": "target_label"}) -> (dict[Artifact, TargetLabel], [Artifact, None]): +def get_preprocessed_java_classes(enhance_ctx: EnhancementContext, input_jars: dict[Artifact, TargetLabel]) -> (dict[Artifact, TargetLabel], Artifact | None): if not input_jars: return {}, None + ctx = enhance_ctx.ctx + input_srcs = {} output_jars_to_owners = {} output_dir = ctx.actions.declare_output("preprocessed_java_classes/output_dir") + input_jars_to_owners = {} for i, (input_jar, target_label) in enumerate(input_jars.items()): expect(input_jar.extension == ".jar", "Expected {} to have extension .jar!".format(input_jar)) jar_name = "{}_{}".format(i, input_jar.basename) input_srcs[jar_name] = input_jar + input_jars_to_owners[jar_name] = target_label output_jar = output_dir.project(jar_name) output_jars_to_owners[output_jar] = target_label input_dir = ctx.actions.symlinked_dir("preprocessed_java_classes/input_dir", input_srcs) + input_jars_map = ctx.actions.write_json("preprocessed_java_classes/input_jars_map.json", input_jars_to_owners) materialized_artifacts_dir = ctx.actions.declare_output("preprocessed_java_classes/materialized_artifacts") + android_toolchain = ctx.attrs._android_toolchain[AndroidToolchainInfo] env = { "ANDROID_BOOTCLASSPATH": cmd_args( - ctx.attrs._android_toolchain[AndroidToolchainInfo].android_bootclasspath, + android_toolchain.android_bootclasspath + android_toolchain.android_optional_jars, delimiter = get_path_separator_for_exec_os(ctx), ), "IN_JARS_DIR": cmd_args(input_dir), + "IN_JARS_MAP": cmd_args(input_jars_map), "MATERIALIZED_ARTIFACTS_DIR": materialized_artifacts_dir.as_output(), "OUT_JARS_DIR": output_dir.as_output(), "PREPROCESS": ctx.attrs.preprocess_java_classes_bash, @@ -53,11 +61,19 @@ def get_preprocessed_java_classes(ctx: AnalysisContext, input_jars = {"artifact" output_jars_file, ] - preprocess_cmd = cmd_args(preprocess_cmd) - preprocess_cmd.hidden([output_jar.as_output() for output_jar in output_jars]) - for dep in ctx.attrs.preprocess_java_classes_deps: - preprocess_cmd.hidden(dep[DefaultInfo].default_outputs + dep[DefaultInfo].other_outputs) + preprocess_cmd = cmd_args( + preprocess_cmd, + hidden = [output_jar.as_output() for output_jar in output_jars] + + [ + dep[DefaultInfo].default_outputs + dep[DefaultInfo].other_outputs + for dep in ctx.attrs.preprocess_java_classes_deps + ], + ) ctx.actions.run(preprocess_cmd, env = env, category = "preprocess_java_classes") + enhance_ctx.debug_output("preprocess_java_classes_input_dir", input_dir) + enhance_ctx.debug_output("preprocess_java_classes_input_jars_map", input_jars_map) + enhance_ctx.debug_output("preprocess_java_classes_materialized_artifacts_dir", materialized_artifacts_dir) + return output_jars_to_owners, materialized_artifacts_dir diff --git a/prelude/android/proguard.bzl b/prelude/android/proguard.bzl index c796fb3390b..e51278988df 100644 --- a/prelude/android/proguard.bzl +++ b/prelude/android/proguard.bzl @@ -17,7 +17,7 @@ load("@prelude//utils:expect.bzl", "expect") ProguardOutput = record( jars_to_owners = dict[Artifact, TargetLabel], - proguard_configuration_output_file = [Artifact, None], + 
proguard_configuration_output_file = Artifact | None, proguard_mapping_output_file = Artifact, proguard_artifacts = list[Artifact], proguard_hidden_artifacts = list[Artifact], @@ -29,9 +29,9 @@ def _get_proguard_command_line_args( proguard_configs: list[Artifact], additional_library_jars: list[Artifact], mapping: Artifact, - configuration: [Artifact, None], - seeds: [Artifact, None], - usage: [Artifact, None], + configuration: Artifact | None, + seeds: Artifact | None, + usage: Artifact | None, android_toolchain: AndroidToolchainInfo) -> (cmd_args, list[Artifact]): cmd = cmd_args() hidden = [] @@ -56,7 +56,7 @@ def _get_proguard_command_line_args( for jar_input, jar_output in input_jars_to_output_jars.items(): cmd.add("-injars", jar_input, "-outjars", jar_output if jar_output == jar_input else jar_output.as_output()) - library_jars = android_toolchain.android_bootclasspath + additional_library_jars + library_jars = android_toolchain.android_bootclasspath + android_toolchain.android_optional_jars + additional_library_jars cmd.add("-libraryjars") cmd.add(cmd_args(library_jars, delimiter = get_path_separator_for_exec_os(ctx))) hidden.extend(library_jars) @@ -89,8 +89,9 @@ def run_proguard( "-jar", android_toolchain.proguard_jar, ) - run_proguard_cmd.add(cmd_args(command_line_args_file, format = "@{}")) - run_proguard_cmd.hidden(command_line_args) + run_proguard_cmd.add( + cmd_args(command_line_args_file, format = "@{}", hidden = command_line_args), + ) output_jars_file = ctx.actions.write("proguard/output_jars.txt", output_jars) @@ -131,7 +132,7 @@ def get_proguard_output( ctx: AnalysisContext, input_jars: dict[Artifact, TargetLabel], java_packaging_deps: list[JavaPackagingDep], - aapt_generated_proguard_config: [Artifact, None], + aapt_generated_proguard_config: Artifact | None, additional_library_jars: list[Artifact]) -> ProguardOutput: proguard_configs = [packaging_dep.proguard_config for packaging_dep in java_packaging_deps if packaging_dep.proguard_config] if ctx.attrs.proguard_config: diff --git a/prelude/android/r_dot_java.bzl b/prelude/android/r_dot_java.bzl index a21e69a4aa4..00b500b3ff1 100644 --- a/prelude/android/r_dot_java.bzl +++ b/prelude/android/r_dot_java.bzl @@ -8,16 +8,17 @@ load("@prelude//android:android_providers.bzl", "AndroidResourceInfo", "RDotJavaInfo") load("@prelude//android:android_toolchain.bzl", "AndroidToolchainInfo") load("@prelude//java:java_library.bzl", "compile_to_jar") -load("@prelude//java:java_providers.bzl", "JavaClasspathEntry", "JavaLibraryInfo", "derive_compiling_deps") -load("@prelude//utils:set.bzl", "set") +load("@prelude//java:java_providers.bzl", "JavaClasspathEntry", "JavaLibraryInfo", "derive_compiling_deps", "generate_java_classpath_snapshot") +load("@prelude//java:java_toolchain.bzl", "JavaToolchainInfo") +load("@prelude//utils:argfile.bzl", "argfile") RDotJavaSourceCode = record( r_dot_java_source_code_dir = Artifact, r_dot_java_source_code_zipped = Artifact, - strings_source_code_dir = [Artifact, None], - strings_source_code_zipped = [Artifact, None], - ids_source_code_dir = [Artifact, None], - ids_source_code_zipped = [Artifact, None], + strings_source_code_dir = Artifact | None, + strings_source_code_zipped = Artifact | None, + ids_source_code_dir = Artifact | None, + ids_source_code_zipped = Artifact | None, ) def get_dummy_r_dot_java( @@ -39,7 +40,7 @@ def generate_r_dot_javas( banned_duplicate_resource_types: list[str], uber_r_dot_txt_files: list[Artifact], override_symbols_paths: list[Artifact], - duplicate_resources_allowlist: 
[Artifact, None], + duplicate_resources_allowlist: Artifact | None, union_package: [str, None], referenced_resources_lists: list[Artifact], generate_strings_and_ids_separately: [bool, None] = True, @@ -107,20 +108,23 @@ def _generate_r_dot_java_source_code( banned_duplicate_resource_types: list[str] = [], uber_r_dot_txt_files: list[Artifact] = [], override_symbols_paths: list[Artifact] = [], - duplicate_resources_allowlist: [Artifact, None] = None, + duplicate_resources_allowlist: Artifact | None = None, union_package: [str, None] = None, referenced_resources_lists: list[Artifact] = []) -> RDotJavaSourceCode: merge_resources_cmd = cmd_args(merge_android_resources_tool) r_dot_txt_info = cmd_args() deduped_android_resources = set([(android_resource.text_symbols, android_resource.r_dot_java_package, android_resource.raw_target) for android_resource in android_resources]) - for (text_symbols, r_dot_java_package, raw_target) in deduped_android_resources.list(): + for (text_symbols, r_dot_java_package, raw_target) in deduped_android_resources: r_dot_txt_info.add(cmd_args([text_symbols, r_dot_java_package, raw_target], delimiter = " ")) r_dot_txt_info_file = ctx.actions.write("r_dot_txt_info_file_for_{}.txt".format(identifier), r_dot_txt_info) merge_resources_cmd.add(["--symbol-file-info", r_dot_txt_info_file]) - merge_resources_cmd.hidden([android_resource.r_dot_java_package for android_resource in android_resources]) - merge_resources_cmd.hidden([android_resource.text_symbols for android_resource in android_resources]) + merge_resources_cmd.add(cmd_args( + hidden = + [android_resource.r_dot_java_package for android_resource in android_resources] + + [android_resource.text_symbols for android_resource in android_resources], + )) output_dir = ctx.actions.declare_output("{}_source_code".format(identifier), dir = True) merge_resources_cmd.add(["--output-dir", output_dir.as_output()]) @@ -150,14 +154,12 @@ def _generate_r_dot_java_source_code( merge_resources_cmd.add(["--banned-duplicate-resource-types", banned_duplicate_resource_types_file]) if len(uber_r_dot_txt_files) > 0: - uber_r_dot_txt_files_list = ctx.actions.write("uber_r_dot_txt_files_list", uber_r_dot_txt_files) + uber_r_dot_txt_files_list = argfile(actions = ctx.actions, name = "uber_r_dot_txt_files_list", args = uber_r_dot_txt_files) merge_resources_cmd.add(["--uber-r-dot-txt", uber_r_dot_txt_files_list]) - merge_resources_cmd.hidden(uber_r_dot_txt_files) if len(override_symbols_paths) > 0: - override_symbols_paths_list = ctx.actions.write("override_symbols_paths_list", override_symbols_paths) + override_symbols_paths_list = argfile(actions = ctx.actions, name = "override_symbols_paths_list", args = override_symbols_paths) merge_resources_cmd.add(["--override-symbols", override_symbols_paths_list]) - merge_resources_cmd.hidden(override_symbols_paths) if duplicate_resources_allowlist != None: merge_resources_cmd.add(["--duplicate-resource-allowlist-path", duplicate_resources_allowlist]) @@ -166,9 +168,8 @@ def _generate_r_dot_java_source_code( merge_resources_cmd.add(["--union-package", union_package]) if referenced_resources_lists: - referenced_resources_file = ctx.actions.write("referenced_resources_lists", referenced_resources_lists) + referenced_resources_file = argfile(actions = ctx.actions, name = "referenced_resources_lists", args = referenced_resources_lists) merge_resources_cmd.add(["--referenced-resources-lists", referenced_resources_file]) - merge_resources_cmd.hidden(referenced_resources_lists) 
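This file applies the same mechanical migration three times above, and dex_rules.bzl and voltron.bzl receive the matching treatment. The before/after shape, with `cmd`, `artifacts`, and the file name purely illustrative and `argfile`'s behavior inferred from its uses in this diff:

    # Before: write the list to a file, pass the file on the command line, and
    # separately register the listed artifacts as hidden inputs of the action.
    list_file = ctx.actions.write("inputs.txt", artifacts)
    cmd.add(["--input-list", list_file])
    cmd.hidden(artifacts)

    # After: argfile wraps the written file and carries the listed artifacts
    # as hidden inputs, so a single value expresses both dependencies.
    list_file = argfile(actions = ctx.actions, name = "inputs.txt", args = artifacts)
    cmd.add(["--input-list", list_file])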
ctx.actions.run(merge_resources_cmd, category = "r_dot_java_merge_resources", identifier = identifier) @@ -198,11 +199,13 @@ def _generate_and_compile_r_dot_java( ) # Extracting an abi is unnecessary as there's not really anything to strip. + jar_snapshot = generate_java_classpath_snapshot(ctx.actions, ctx.attrs._java_toolchain[JavaToolchainInfo].cp_snapshot_generator, r_dot_java_out, identifier) library_output = JavaClasspathEntry( full_library = r_dot_java_out, abi = r_dot_java_out, abi_as_dir = None, required_for_source_only_abi = False, + abi_jar_snapshot = jar_snapshot, ) return RDotJavaInfo( diff --git a/prelude/android/robolectric_test.bzl b/prelude/android/robolectric_test.bzl index 475a2c847db..386c927316b 100644 --- a/prelude/android/robolectric_test.bzl +++ b/prelude/android/robolectric_test.bzl @@ -6,9 +6,10 @@ # of this source tree. load("@prelude//android:android_binary_resources_rules.bzl", "get_android_binary_resources_info") -load("@prelude//android:android_library.bzl", "build_android_library") +load("@prelude//android:android_library.bzl", "build_android_library", "optional_jars") load("@prelude//android:android_providers.bzl", "merge_android_packageable_info") load("@prelude//android:android_toolchain.bzl", "AndroidToolchainInfo") +load("@prelude//java:java_providers.bzl", "JavaLibraryInfo") load("@prelude//java:java_test.bzl", "build_junit_test") load("@prelude//java:java_toolchain.bzl", "JavaToolchainInfo") load("@prelude//utils:expect.bzl", "expect") @@ -68,7 +69,7 @@ def robolectric_test_impl(ctx: AnalysisContext) -> list[Provider]: ".", ]) ctx.actions.run(jar_cmd, category = "test_config_properties_jar_cmd") - extra_cmds.append(cmd_args().hidden(resources_info.primary_resources_apk, resources_info.manifest)) + extra_cmds.append(cmd_args(hidden = [resources_info.primary_resources_apk, resources_info.manifest])) r_dot_javas = [r_dot_java.library_info.library_output.full_library for r_dot_java in resources_info.r_dot_java_infos if r_dot_java.library_info.library_output] expect(len(r_dot_javas) <= 1, "android_library only works with single R.java") @@ -81,7 +82,7 @@ def robolectric_test_impl(ctx: AnalysisContext) -> list[Provider]: r_dot_java = None java_providers, _ = build_android_library(ctx, r_dot_java = r_dot_java, extra_sub_targets = extra_sub_targets) - extra_classpath_entries = [test_config_properties_jar] + ctx.attrs._android_toolchain[AndroidToolchainInfo].android_bootclasspath + extra_classpath_entries = [test_config_properties_jar] + ctx.attrs._android_toolchain[AndroidToolchainInfo].android_bootclasspath + optional_jars(ctx) extra_classpath_entries.extend(r_dot_javas) external_runner_test_info = build_junit_test( ctx, @@ -98,9 +99,17 @@ def robolectric_test_impl(ctx: AnalysisContext) -> list[Provider]: java_providers.template_placeholder_info, java_providers.default_info, java_providers.class_to_src_map, + java_providers.java_global_code_info, ] if ctx.attrs.used_as_dependency_deprecated_do_not_use: providers.append(java_providers.java_library_info) + else: + java_library_without_compiling_deps = JavaLibraryInfo( + compiling_deps = None, + library_output = java_providers.java_library_info.library_output, + output_for_classpath_macro = java_providers.java_library_info.output_for_classpath_macro, + ) + providers.append(java_library_without_compiling_deps) return providers diff --git a/prelude/android/tools/BUCK.v2 b/prelude/android/tools/BUCK.v2 index 35e57623c05..9f6de47604b 100644 --- a/prelude/android/tools/BUCK.v2 +++ b/prelude/android/tools/BUCK.v2 @@ 
-1,21 +1,29 @@ -native.python_bootstrap_binary( +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + +prelude = native # Avoid warnings and auto-formatters + +prelude.python_bootstrap_binary( name = "unpack_aar", main = "unpack_aar.py", visibility = ["PUBLIC"], deps = [ - ":unpack_aar_lib", "prelude//java/tools:utils_lib", + ":unpack_aar_lib", ], ) -native.python_bootstrap_library( +prelude.python_bootstrap_library( name = "unpack_aar_lib", srcs = [ "unpack_aar.py", ], ) -native.python_bootstrap_binary( +prelude.python_bootstrap_binary( name = "filter_dex", main = "filter_dex.py", visibility = ["PUBLIC"], @@ -24,14 +32,14 @@ native.python_bootstrap_binary( ], ) -native.python_bootstrap_library( +prelude.python_bootstrap_library( name = "filter_dex_lib", srcs = [ "filter_dex.py", ], ) -native.python_bootstrap_binary( +prelude.python_bootstrap_binary( name = "combine_native_library_dirs", main = "combine_native_library_dirs.py", visibility = ["PUBLIC"], @@ -40,14 +48,14 @@ native.python_bootstrap_binary( ], ) -native.python_bootstrap_library( +prelude.python_bootstrap_library( name = "combine_native_library_dirs_lib", srcs = [ "combine_native_library_dirs.py", ], ) -native.python_bootstrap_binary( +prelude.python_bootstrap_binary( name = "filter_prebuilt_native_library_dir", main = "filter_prebuilt_native_library_dir.py", visibility = ["PUBLIC"], @@ -56,14 +64,14 @@ native.python_bootstrap_binary( ], ) -native.python_bootstrap_library( +prelude.python_bootstrap_library( name = "filter_prebuilt_native_library_dir_lib", srcs = [ "filter_prebuilt_native_library_dir.py", ], ) -native.python_bootstrap_binary( +prelude.python_bootstrap_binary( name = "native_libs_as_assets_metadata", main = "native_libs_as_assets_metadata.py", visibility = ["PUBLIC"], @@ -72,20 +80,20 @@ native.python_bootstrap_binary( ], ) -native.python_bootstrap_library( +prelude.python_bootstrap_library( name = "native_libs_as_assets_metadata_lib", srcs = [ "native_libs_as_assets_metadata.py", ], ) -native.python_bootstrap_binary( +prelude.python_bootstrap_binary( name = "compute_merge_sequence", main = "merge_sequence.py", visibility = ["PUBLIC"], ) -native.python_bootstrap_binary( +prelude.python_bootstrap_binary( name = "filter_extra_resources", main = "filter_extra_resources.py", visibility = ["PUBLIC"], @@ -94,7 +102,7 @@ native.python_bootstrap_binary( ], ) -native.zip_file( +prelude.zip_file( name = "app_without_resources_stub", srcs = ["com/facebook/buck_generated/AppWithoutResourcesStub.java"], out = "app_without_resources_stub.src.zip", diff --git a/prelude/android/tools/com/facebook/buck_generated/AppWithoutResourcesStub.java b/prelude/android/tools/com/facebook/buck_generated/AppWithoutResourcesStub.java index 1c8a8df449c..9d3de2fc3a7 100644 --- a/prelude/android/tools/com/facebook/buck_generated/AppWithoutResourcesStub.java +++ b/prelude/android/tools/com/facebook/buck_generated/AppWithoutResourcesStub.java @@ -1,4 +1,11 @@ -// (c) Meta Platforms, Inc. and affiliates. Confidential and proprietary. +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under both the MIT license found in the + * LICENSE-MIT file in the root directory of this source tree and the Apache + * License, Version 2.0 found in the LICENSE-APACHE file in the root directory + * of this source tree. 
+ */ package com.facebook.buck_generated; diff --git a/prelude/android/tools/merge_sequence.py b/prelude/android/tools/merge_sequence.py index 093cdcd5462..65543e3e947 100644 --- a/prelude/android/tools/merge_sequence.py +++ b/prelude/android/tools/merge_sequence.py @@ -222,6 +222,7 @@ def get_base_name(self) -> str: class NodeData(typing.NamedTuple): base_library_name: str + could_be_root_for: list[str] module: str merge_group: int is_excluded: bool @@ -245,6 +246,7 @@ class FinalLibData(typing.NamedTuple): is_excluded: bool key: FinalLibKey deps: set[FinalLibKey] + entry_point_targets: set[str] class FinalLibGraph: @@ -255,7 +257,7 @@ class FinalLibGraph: def __init__(self) -> None: self.graph = {} - def add_node(self, node_data: NodeData, deps_data: list[NodeData]) -> None: + def _ensure_lib_data(self, node_data: NodeData) -> FinalLibData: lib_key = node_data.final_lib_key lib_data = self.graph.get(lib_key, None) if not lib_data: @@ -268,21 +270,41 @@ def add_node(self, node_data: NodeData, deps_data: list[NodeData]) -> None: is_excluded=node_data.is_excluded, key=lib_key, deps=set(), + entry_point_targets=set(), ), ) else: assert lib_data.module == node_data.module, (lib_data, node_data) assert lib_data.merge_group == node_data.merge_group, (lib_data, node_data) - for dep_data in deps_data: - if dep_data.final_lib_key != lib_key: + return lib_data + + def add_node( + self, + node_data: NodeData, + deps: list[str], + deps_data: list[NodeData], + ) -> None: + lib_data = self._ensure_lib_data(node_data) + + for dep, dep_data in zip(deps, deps_data): + if dep_data.final_lib_key != node_data.final_lib_key: lib_data.deps.add(dep_data.final_lib_key) + dep_lib_data = self._ensure_lib_data(dep_data) + dep_lib_data.entry_point_targets.add(dep) - def dump_graph(self, names: dict[FinalLibKey, str]) -> dict[str, list[str]]: + def dump_lib_edges(self, names: dict[FinalLibKey, str]) -> dict[str, list[str]]: return { names[k]: [names[d] for d in node.deps] for k, node in self.graph.items() } + def dump_entry_point_targets( + self, names: dict[FinalLibKey, str] + ) -> dict[str, list[str]]: + return { + names[k]: list(node.entry_point_targets) for k, node in self.graph.items() + } + def assign_names( self, merge_group_module_constituents: list[set[str]] ) -> dict[FinalLibKey, str]: @@ -291,7 +313,9 @@ def assign_names( final_lib_graph[key] = list(dep_data.deps) # this topo_sort also verifies that we produced an acyclic final lib graph - sorted_final_lib_keys = topo_sort(final_lib_graph) + sorted_final_lib_keys = topo_sort( + final_lib_graph, lambda x: self.graph[x].module if self.graph[x] else str(x) + ) name_counters = {} final_lib_names: dict[FinalLibKey, str] = {} @@ -550,6 +574,7 @@ def get_split_group( this_node_data = NodeData( base_library_name=base_library_name, + could_be_root_for=list(group_roots.get(target, set())), module=module, merge_group=current_merge_group, final_lib_key=FinalLibKey( @@ -566,7 +591,7 @@ def get_split_group( for target in post_ordered_targets: node = graph_node_map[target] deps_data = [node_data[dep] for dep in node.deps] - final_lib_graph.add_node(node_data[target], deps_data) + final_lib_graph.add_node(node_data[target], node.deps, deps_data) final_lib_names = final_lib_graph.assign_names(merge_group_module_constituents) return node_data, final_lib_names, final_lib_graph @@ -576,7 +601,9 @@ def get_split_group( def post_order_traversal_by( - roots: list[T], get_nodes_to_traverse_func: typing.Callable[[T], list[T]] + roots: list[T], + get_nodes_to_traverse_func: 
typing.Callable[[T], list[T]], + get_node_str: typing.Callable[[T], str] = None, ) -> list[T]: """ Returns the post-order sorted list of the nodes in the traversal. @@ -605,9 +632,17 @@ def post_order_traversal_by( work.append((OUTPUT, node)) for dep in get_nodes_to_traverse_func(node): if dep in current_parents: + current_parents_strs = [] + for k in current_parents: + current_parents_strs.append( + get_node_str(k) if get_node_str else str(k) + ) raise AssertionError( "detected cycle: {}".format( - " -> ".join(current_parents + [dep]) + " -> ".join( + current_parents_strs + + [get_node_str(dep) if get_node_str else str(dep)] + ) ) ) @@ -626,7 +661,9 @@ def is_root_module(module: str) -> bool: return module == ROOT_MODULE -def topo_sort(graph: dict[T, list[T]]) -> list[T]: +def topo_sort( + graph: dict[T, list[T]], get_node_str: typing.Callable[[T], str] = None +) -> list[T]: """ Topo-sort the given graph. """ @@ -642,7 +679,7 @@ def topo_sort(graph: dict[T, list[T]]) -> list[T]: if in_degree == 0: roots.append(node) - postordered = post_order_traversal_by(roots, lambda x: graph[x]) + postordered = post_order_traversal_by(roots, lambda x: graph[x], get_node_str) postordered.reverse() return postordered @@ -711,9 +748,14 @@ def main() -> int: # noqa: C901 else: final_mapping[target] = str(target) debug_results[platform] = ( + # Target name -> various information {k: v.debug() for k, v in node_data.items()}, + # Serialized FinalLibKey -> final library name {str(k): v for k, v in final_lib_names.items()}, - final_lib_graph.dump_graph(final_lib_names), + # Final library name -> final names of direct library dependencies + final_lib_graph.dump_lib_edges(final_lib_names), + # Final library name -> entry point targets + final_lib_graph.dump_entry_point_targets(final_lib_names), ) final_result[platform] = final_mapping diff --git a/prelude/android/tools/native_libs_as_assets_metadata.py b/prelude/android/tools/native_libs_as_assets_metadata.py index 6b31c0b7e62..87f8d5a1c3c 100644 --- a/prelude/android/tools/native_libs_as_assets_metadata.py +++ b/prelude/android/tools/native_libs_as_assets_metadata.py @@ -45,11 +45,6 @@ def main() -> None: type=Path, help="Metadata is written to this file", ) - parser.add_argument( - "--native-library-paths-output", - type=Path, - help="The actual paths of all the native libraries", - ) args = parser.parse_args() native_libraries = [] @@ -83,11 +78,6 @@ def main() -> None: ) ) - with open(args.native_library_paths_output, "w") as f: - f.write( - "\n".join([str(native_lib.full_path) for native_lib in native_libraries]) - ) - if __name__ == "__main__": main() diff --git a/prelude/android/tools/unpack_aar.py b/prelude/android/tools/unpack_aar.py index 3928c3b0510..b86bfb1382a 100644 --- a/prelude/android/tools/unpack_aar.py +++ b/prelude/android/tools/unpack_aar.py @@ -77,6 +77,12 @@ def _parse_args(): required=True, help="a path to the proguard config that is unpacked", ) + parser.add_argument( + "--lint-jar-path", + type=pathlib.Path, + required=True, + help="a path to the lint jar file that is unpacked", + ) parser.add_argument( "--jar-builder-tool", type=str, @@ -99,6 +105,7 @@ def main(): r_dot_txt_path = args.r_dot_txt_path annotation_jars_dir = args.annotation_jars_dir proguard_config_path = args.proguard_config_path + lint_jar_path = args.lint_jar_path jar_builder_tool = args.jar_builder_tool with TemporaryDirectory() as temp_dir: @@ -151,6 +158,12 @@ def main(): else: proguard_config_path.touch() + unpacked_lint_jar = unpack_dir / "lint.jar" + if 
unpacked_lint_jar.exists(): + shutil.copyfile(unpacked_lint_jar, lint_jar_path) + else: + lint_jar_path.touch() + # Java .class files can exist at `classes.jar` or any jar file in /libs, # so combine them into a single `.jar` file. all_jars = [] diff --git a/prelude/android/user/android_emulators.bzl b/prelude/android/user/android_emulators.bzl new file mode 100644 index 00000000000..29da9e6e7bd --- /dev/null +++ b/prelude/android/user/android_emulators.bzl @@ -0,0 +1,28 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load("@prelude//user:rule_spec.bzl", "RuleRegistrationSpec") + +def _impl(ctx: AnalysisContext) -> list[Provider]: + return [ + DefaultInfo(), + LocalResourceInfo( + setup = cmd_args([ctx.attrs.broker[RunInfo]] + ctx.attrs.args), + resource_env_vars = { + "ANDROID_SERIAL": "serial_number", + }, + ), + ] + +registration_spec = RuleRegistrationSpec( + name = "android_emulators", + impl = _impl, + attrs = { + "args": attrs.list(attrs.arg(), default = []), + "broker": attrs.exec_dep(providers = [RunInfo]), + }, +) diff --git a/prelude/android/voltron.bzl b/prelude/android/voltron.bzl index d6622c261a0..20fcfc85ba9 100644 --- a/prelude/android/voltron.bzl +++ b/prelude/android/voltron.bzl @@ -15,8 +15,8 @@ load( "merge_shared_libraries", "traverse_shared_library_info", ) +load("@prelude//utils:argfile.bzl", "argfile") load("@prelude//utils:expect.bzl", "expect") -load("@prelude//utils:set.bzl", "set") load("@prelude//utils:utils.bzl", "flatten") # "Voltron" gives us the ability to split our Android APKs into different "modules". 
These @@ -68,7 +68,7 @@ def android_app_modularity_impl(ctx: AnalysisContext) -> list[Provider]: ctx.actions, ctx.label, [android_packageable_info], - traversed_shared_library_info.values(), + traversed_shared_library_info, ctx.attrs._android_toolchain[AndroidToolchainInfo], ctx.attrs.application_module_configs, ctx.attrs.application_module_dependencies, @@ -79,19 +79,19 @@ def android_app_modularity_impl(ctx: AnalysisContext) -> list[Provider]: no_dx_target_labels = [no_dx_target.label.raw_target() for no_dx_target in ctx.attrs.no_dx] java_packaging_deps = [packaging_dep for packaging_dep in get_all_java_packaging_deps(ctx, all_deps) if packaging_dep.dex and packaging_dep.dex.dex.owner.raw_target() not in no_dx_target_labels] targets_to_jars_args = [cmd_args([str(packaging_dep.label.raw_target()), packaging_dep.jar], delimiter = " ") for packaging_dep in java_packaging_deps] - targets_to_jars = ctx.actions.write("targets_to_jars.txt", targets_to_jars_args) + targets_to_jars = argfile(actions = ctx.actions, name = "targets_to_jars.txt", args = targets_to_jars_args) cmd.add([ "--targets-to-jars", targets_to_jars, - ]).hidden(targets_to_jars_args) + ]) if ctx.attrs.should_include_libraries: - targets_to_so_names_args = [cmd_args([str(shared_lib.label.raw_target()), so_name], delimiter = " ") for so_name, shared_lib in traversed_shared_library_info.items()] - targets_to_so_names = ctx.actions.write("targets_to_so_names.txt", targets_to_so_names_args) + targets_to_so_names_args = [cmd_args([str(shared_lib.label.raw_target()), shared_lib.soname.ensure_str()], delimiter = " ") for shared_lib in traversed_shared_library_info] + targets_to_so_names = argfile(actions = ctx.actions, name = "targets_to_so_names.txt", args = targets_to_so_names_args) cmd.add([ "--targets-to-so-names", targets_to_so_names, - ]).hidden(targets_to_so_names_args) + ]) traversed_prebuilt_native_library_dirs = android_packageable_info.prebuilt_native_library_dirs.traverse() if android_packageable_info.prebuilt_native_library_dirs else [] targets_to_non_assets_prebuilt_native_library_dirs_args = [ @@ -99,17 +99,21 @@ def android_app_modularity_impl(ctx: AnalysisContext) -> list[Provider]: for prebuilt_native_library_dir in traversed_prebuilt_native_library_dirs if not prebuilt_native_library_dir.is_asset and not prebuilt_native_library_dir.for_primary_apk ] - targets_to_non_assets_prebuilt_native_library_dirs = ctx.actions.write("targets_to_non_assets_prebuilt_native_library_dirs.txt", targets_to_non_assets_prebuilt_native_library_dirs_args) + targets_to_non_assets_prebuilt_native_library_dirs = argfile( + actions = ctx.actions, + name = "targets_to_non_assets_prebuilt_native_library_dirs.txt", + args = targets_to_non_assets_prebuilt_native_library_dirs_args, + ) cmd.add([ "--targets-to-non-asset-prebuilt-native-library-dirs", targets_to_non_assets_prebuilt_native_library_dirs, - ]).hidden(targets_to_non_assets_prebuilt_native_library_dirs_args) + ]) ctx.actions.run(cmd, category = "apk_module_graph") return [DefaultInfo(default_output = output)] -def get_target_to_module_mapping(ctx: AnalysisContext, deps_by_platform: dict[str, list[Dependency]]) -> [Artifact, None]: +def get_target_to_module_mapping(ctx: AnalysisContext, deps_by_platform: dict[str, list[Dependency]]) -> Artifact | None: if not ctx.attrs.application_module_configs: return None @@ -121,7 +125,7 @@ def get_target_to_module_mapping(ctx: AnalysisContext, deps_by_platform: dict[st ctx.actions, deps = filter(None, [x.get(SharedLibraryInfo) for x in deps]), ) - 
shared_libraries.extend(traverse_shared_library_info(shared_library_info).values()) + shared_libraries.extend(traverse_shared_library_info(shared_library_info)) cmd, output = _get_base_cmd_and_output( ctx.actions, @@ -148,7 +152,7 @@ def _get_base_cmd_and_output( android_toolchain: AndroidToolchainInfo, application_module_configs: dict[str, list[Dependency]], application_module_dependencies: [dict[str, list[str]], None], - application_module_blocklist: [list[list[Dependency]], None]) -> (cmd_args, Artifact): + application_module_blocklist: [list[Dependency], None]) -> (cmd_args, Artifact): deps_map = {} primary_apk_deps = set() for android_packageable_info in android_packageable_infos: @@ -187,10 +191,10 @@ def _get_base_cmd_and_output( used_by_wrap_script_libs = [str(shared_lib.label.raw_target()) for shared_lib in shared_libraries if shared_lib.for_primary_apk] prebuilt_native_library_dirs = flatten([list(android_packageable_info.prebuilt_native_library_dirs.traverse()) if android_packageable_info.prebuilt_native_library_dirs else [] for android_packageable_info in android_packageable_infos]) prebuilt_native_library_targets_for_primary_apk = dedupe([str(native_lib_dir.raw_target) for native_lib_dir in prebuilt_native_library_dirs if native_lib_dir.for_primary_apk]) - if application_module_blocklist or used_by_wrap_script_libs or prebuilt_native_library_targets_for_primary_apk or primary_apk_deps.size() > 0: - all_blocklisted_deps = used_by_wrap_script_libs + prebuilt_native_library_targets_for_primary_apk + primary_apk_deps.list() + if application_module_blocklist or used_by_wrap_script_libs or prebuilt_native_library_targets_for_primary_apk or len(primary_apk_deps) > 0: + all_blocklisted_deps = used_by_wrap_script_libs + prebuilt_native_library_targets_for_primary_apk + list(primary_apk_deps) if application_module_blocklist: - all_blocklisted_deps.extend([str(blocklisted_dep.label.raw_target()) for blocklisted_dep in flatten(application_module_blocklist)]) + all_blocklisted_deps.extend([str(blocklisted_dep.label.raw_target()) for blocklisted_dep in application_module_blocklist]) application_module_blocklist_file = actions.write( "application_module_blocklist.txt", @@ -216,6 +220,9 @@ APKModuleGraphInfo = record( target_to_module_mapping_function = typing.Callable, module_to_canary_class_name_function = typing.Callable, module_to_module_deps_function = typing.Callable, + transitive_module_deps_function = typing.Callable, + calculated_deps_function = typing.Callable, + get_deps_debug_data = typing.Callable, ) def get_root_module_only_apk_module_graph_info() -> APKModuleGraphInfo: @@ -232,6 +239,9 @@ def get_root_module_only_apk_module_graph_info() -> APKModuleGraphInfo: target_to_module_mapping_function = all_targets_in_root_module, module_to_canary_class_name_function = root_module_canary_class_name, module_to_module_deps_function = root_module_deps, + transitive_module_deps_function = root_module_deps, + calculated_deps_function = root_module_deps, + get_deps_debug_data = root_module_deps, ) def get_apk_module_graph_info( @@ -246,6 +256,9 @@ def get_apk_module_graph_info( module_to_canary_class_name_map = {} module_to_module_deps_map = {} + transitive_module_deps_map = {} + calculated_deps_map = {} + shared_module_rdeps = {} for line in module_infos: line_data = line.split(" ") module_name = line_data[0] @@ -253,12 +266,37 @@ def get_apk_module_graph_info( module_deps = [module_dep for module_dep in line_data[2:] if module_dep] module_to_canary_class_name_map[module_name] = 
canary_class_name module_to_module_deps_map[module_name] = module_deps + shared_modules = [module_dep for module_dep in module_deps if module_dep.startswith("s_")] + for shared_module in shared_modules: + rdeps = shared_module_rdeps.get(shared_module, set()) + rdeps.add(module_name) + shared_module_rdeps[shared_module] = rdeps target_to_module_mapping = {str(ctx.label.raw_target()): ROOT_MODULE} for line in target_to_module_lines: target, module = line.split(" ") target_to_module_mapping[target] = module + for module, deps in module_to_module_deps_map.items(): + visited_modules = set() + queue = [d for d in deps] + for _ in range(1, 1000): # represents a while loop since while loops don't exist in Starlark + if len(queue) == 0: + transitive_module_deps_map[module] = visited_modules + continue + node = queue.pop() + visited_modules.add(node) + for d in module_to_module_deps_map[node]: + if d not in visited_modules: + queue.append(d) + for shared_module, rdeps in shared_module_rdeps.items(): + rdeps_list = list(rdeps) + head = rdeps_list[0] + intersection = transitive_module_deps_map[head] + for rdep in rdeps_list[1:]: + intersection = intersection & transitive_module_deps_map[rdep] + calculated_deps_map[shared_module] = intersection | rdeps + def target_to_module_mapping_function(raw_target: str) -> str: mapped_module = target_to_module_mapping.get(raw_target) expect(mapped_module != None, "No module found for target {}!".format(raw_target)) @@ -270,9 +308,21 @@ def get_apk_module_graph_info( def module_to_module_deps_function(voltron_module: str) -> list: return module_to_module_deps_map.get(voltron_module) + def transitive_module_deps_function(voltron_module: str) -> set[str]: + return transitive_module_deps_map.get(voltron_module) + + def calculated_deps_function(voltron_module: str) -> set[str]: + return calculated_deps_map.get(voltron_module) if voltron_module in calculated_deps_map else set() + + def get_deps_debug_data() -> str: + return "tdeps - {} \n calculated deps - {}".format(transitive_module_deps_map, calculated_deps_map) + return APKModuleGraphInfo( module_list = module_to_canary_class_name_map.keys(), target_to_module_mapping_function = target_to_module_mapping_function, module_to_canary_class_name_function = module_to_canary_class_name_function, module_to_module_deps_function = module_to_module_deps_function, + transitive_module_deps_function = transitive_module_deps_function, + calculated_deps_function = calculated_deps_function, + get_deps_debug_data = get_deps_debug_data, ) diff --git a/prelude/apple/apple_asset_catalog.bzl b/prelude/apple/apple_asset_catalog.bzl index a336cc2be62..c16c1b68a0c 100644 --- a/prelude/apple/apple_asset_catalog.bzl +++ b/prelude/apple/apple_asset_catalog.bzl @@ -38,7 +38,13 @@ def compile_apple_asset_catalog(ctx: AnalysisContext, specs: list[AppleAssetCata processing_options = get_bundle_resource_processing_options(ctx) compilation_options = get_apple_asset_catalogs_compilation_options(ctx) command = _get_actool_command(ctx, single_spec, catalog.as_output(), plist.as_output(), compilation_options) - ctx.actions.run(command, prefer_local = processing_options.prefer_local, allow_cache_upload = processing_options.allow_cache_upload, category = "apple_asset_catalog") + ctx.actions.run( + command, + prefer_local = processing_options.prefer_local, + prefer_remote = processing_options.prefer_remote, + allow_cache_upload = processing_options.allow_cache_upload, + category = "apple_asset_catalog", + ) return AppleAssetCatalogResult(compiled_catalog = 
catalog, catalog_plist = plist) def _merge_asset_catalog_specs(ctx: AnalysisContext, xs: list[AppleAssetCatalogSpec]) -> AppleAssetCatalogSpec: @@ -111,5 +117,5 @@ def _get_actool_command(ctx: AnalysisContext, info: AppleAssetCatalogSpec, catal ], allow_args = True, ) - command = cmd_args(["/bin/sh", wrapper_script]).hidden([actool_command, catalog_output]) + command = cmd_args(["/bin/sh", wrapper_script], hidden = [actool_command, catalog_output]) return command diff --git a/prelude/apple/apple_binary.bzl b/prelude/apple/apple_binary.bzl index fce5397e439..3a7644dbf03 100644 --- a/prelude/apple/apple_binary.bzl +++ b/prelude/apple/apple_binary.bzl @@ -5,6 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +load("@prelude//:attrs_validators.bzl", "get_attrs_validators_outputs") load("@prelude//:paths.bzl", "paths") load("@prelude//:validation_deps.bzl", "get_validation_deps_outputs") load("@prelude//apple:apple_stripping.bzl", "apple_strip_args") @@ -23,13 +24,13 @@ load( "@prelude//cxx:argsfiles.bzl", "CompileArgsfiles", ) +load("@prelude//cxx:cxx_executable.bzl", "cxx_executable") +load("@prelude//cxx:cxx_library_utility.bzl", "cxx_attr_deps", "cxx_attr_exported_deps") load( - "@prelude//cxx:compile.bzl", + "@prelude//cxx:cxx_sources.bzl", "CxxSrcWithFlags", # @unused Used as a type + "get_srcs_with_flags", ) -load("@prelude//cxx:cxx_executable.bzl", "cxx_executable") -load("@prelude//cxx:cxx_library_utility.bzl", "cxx_attr_deps", "cxx_attr_exported_deps") -load("@prelude//cxx:cxx_sources.bzl", "get_srcs_with_flags") load( "@prelude//cxx:cxx_types.bzl", "CxxRuleAdditionalParams", @@ -41,6 +42,7 @@ load( "cxx_get_regular_cxx_headers_layout", "prepare_headers", ) +load("@prelude//cxx:index_store.bzl", "create_index_store_subtargets_and_provider") load( "@prelude//cxx:link_groups.bzl", "get_link_group_info", @@ -63,8 +65,9 @@ load(":apple_bundle_utility.bzl", "get_bundle_infos_from_graph", "merge_bundle_l load(":apple_code_signing_types.bzl", "AppleEntitlementsInfo") load(":apple_dsym.bzl", "DSYM_SUBTARGET", "get_apple_dsym") load(":apple_entitlements.bzl", "entitlements_link_flags") +load(":apple_error_handler.bzl", "apple_build_error_handler") load(":apple_frameworks.bzl", "get_framework_search_path_flags") -load(":apple_target_sdk_version.bzl", "get_min_deployment_version_for_node", "get_min_deployment_version_target_linker_flags", "get_min_deployment_version_target_preprocessor_flags") +load(":apple_target_sdk_version.bzl", "get_min_deployment_version_for_node") load(":apple_utility.bzl", "get_apple_cxx_headers_layout", "get_apple_stripped_attr_value_with_default_fallback") load(":debug.bzl", "AppleDebuggableInfo") load(":resource_groups.bzl", "create_resource_graph") @@ -78,6 +81,7 @@ def apple_binary_impl(ctx: AnalysisContext) -> [list[Provider], Promise]: objc_bridging_header_flags = _get_bridging_header_flags(ctx) cxx_srcs, swift_srcs = _filter_swift_srcs(ctx) + contains_swift_sources = len(swift_srcs) > 0 framework_search_path_flags = get_framework_search_path_flags(ctx) swift_compile, _ = compile_swift( @@ -96,13 +100,13 @@ def apple_binary_impl(ctx: AnalysisContext) -> [list[Provider], Promise]: extra_linker_output_flags, extra_linker_output_providers = [], {} # @oss-enable # @oss-disable: extra_linker_output_flags, extra_linker_output_providers = add_extra_linker_outputs(ctx) - extra_link_flags = get_min_deployment_version_target_linker_flags(ctx) + entitlements_link_flags(ctx) + extra_linker_output_flags + 
extra_link_flags = entitlements_link_flags(ctx) + extra_linker_output_flags framework_search_path_pre = CPreprocessor( - relative_args = CPreprocessorArgs(args = [framework_search_path_flags]), + args = CPreprocessorArgs(args = [framework_search_path_flags]), ) - swift_dependency_info = swift_compile.dependency_info if swift_compile else get_swift_dependency_info(ctx, None, None, deps_providers) + swift_dependency_info = swift_compile.dependency_info if swift_compile else get_swift_dependency_info(ctx, None, deps_providers) swift_debug_info = get_swift_debug_infos( ctx, swift_dependency_info, @@ -133,18 +137,30 @@ def apple_binary_impl(ctx: AnalysisContext) -> [list[Provider], Promise]: ), ], }, + external_debug_info_tags = [], # This might be used to materialise all transitive Swift related object files with ArtifactInfoTag("swiftmodule") ), extra_link_input = swift_object_files, extra_link_input_has_external_debug_info = True, - extra_preprocessors = get_min_deployment_version_target_preprocessor_flags(ctx) + [framework_search_path_pre] + swift_preprocessor, + extra_preprocessors = [framework_search_path_pre] + swift_preprocessor, strip_executable = stripped, strip_args_factory = apple_strip_args, - cxx_populate_xcode_attributes_func = apple_populate_xcode_attributes, + cxx_populate_xcode_attributes_func = lambda local_ctx, **kwargs: apple_populate_xcode_attributes(local_ctx, contains_swift_sources = contains_swift_sources, **kwargs), link_group_info = get_link_group_info(ctx), prefer_stripped_objects = ctx.attrs.prefer_stripped_objects, # Some apple rules rely on `static` libs *not* following dependents. link_groups_force_static_follows_dependents = False, swiftmodule_linkable = get_swiftmodule_linkable(swift_compile), + compiler_flags = ctx.attrs.compiler_flags, + lang_compiler_flags = ctx.attrs.lang_compiler_flags, + platform_compiler_flags = ctx.attrs.platform_compiler_flags, + lang_platform_compiler_flags = ctx.attrs.lang_platform_compiler_flags, + preprocessor_flags = ctx.attrs.preprocessor_flags, + lang_preprocessor_flags = ctx.attrs.lang_preprocessor_flags, + platform_preprocessor_flags = ctx.attrs.platform_preprocessor_flags, + lang_platform_preprocessor_flags = ctx.attrs.lang_platform_preprocessor_flags, + error_handler = apple_build_error_handler, + index_stores = swift_compile.index_stores if swift_compile else None, + executable_name = ctx.attrs.executable_name, ) cxx_output = cxx_executable(ctx, constructor_params) @@ -171,11 +187,13 @@ def apple_binary_impl(ctx: AnalysisContext) -> [list[Provider], Promise]: min_version = get_min_deployment_version_for_node(ctx) min_version_providers = [AppleMinDeploymentVersionInfo(version = min_version)] + non_exported_deps = cxx_attr_deps(ctx) + exported_deps = cxx_attr_exported_deps(ctx) resource_graph = create_resource_graph( ctx = ctx, labels = ctx.attrs.labels, - deps = cxx_attr_deps(ctx), - exported_deps = cxx_attr_exported_deps(ctx), + deps = non_exported_deps, + exported_deps = exported_deps, ) bundle_infos = get_bundle_infos_from_graph(resource_graph) if cxx_output.linker_map_data: @@ -189,16 +207,27 @@ def apple_binary_impl(ctx: AnalysisContext) -> [list[Provider], Promise]: if cxx_output.sanitizer_runtime_files: sanitizer_runtime_providers.append(CxxSanitizerRuntimeInfo(runtime_files = cxx_output.sanitizer_runtime_files)) + attrs_validators_providers, attrs_validators_subtargets = get_attrs_validators_outputs(ctx) + + index_stores = [] + if swift_compile and swift_compile.index_stores: + 
index_stores.extend(swift_compile.index_stores) + index_stores.extend(cxx_output.index_stores) + + index_store_subtargets, index_store_info = create_index_store_subtargets_and_provider(ctx, index_stores, non_exported_deps + exported_deps) + cxx_output.sub_targets.update(index_store_subtargets) + return [ - DefaultInfo(default_output = cxx_output.binary, sub_targets = cxx_output.sub_targets), - RunInfo(args = cmd_args(cxx_output.binary).hidden(cxx_output.runtime_files)), + DefaultInfo(default_output = cxx_output.binary, sub_targets = cxx_output.sub_targets | attrs_validators_subtargets), + RunInfo(args = cmd_args(cxx_output.binary, hidden = cxx_output.runtime_files)), AppleEntitlementsInfo(entitlements_file = ctx.attrs.entitlements_file), AppleDebuggableInfo(dsyms = [dsym_artifact], debug_info_tset = cxx_output.external_debug_info), cxx_output.xcode_data, cxx_output.compilation_db, merge_bundle_linker_maps_info(bundle_infos), UnstrippedLinkOutputInfo(artifact = unstripped_binary), - ] + [resource_graph] + min_version_providers + link_command_providers + sanitizer_runtime_providers + index_store_info, + ] + [resource_graph] + min_version_providers + link_command_providers + sanitizer_runtime_providers + attrs_validators_providers if uses_explicit_modules(ctx): return get_swift_anonymous_targets(ctx, get_apple_binary_providers) @@ -229,8 +258,7 @@ def _get_bridging_header_flags(ctx: AnalysisContext) -> list[ArgLike]: header_map = {paths.join(h.namespace, h.name): h.artifact for h in headers} # We need to expose private headers to swift-compile action, in case something is imported to bridging header. - # TODO(chatatap): Handle absolute paths here. - header_root = prepare_headers(ctx, header_map, "apple-binary-private-headers", None) + header_root = prepare_headers(ctx, header_map, "apple-binary-private-headers") if header_root != None: private_headers_args = [cmd_args("-I"), header_root.include_path] else: diff --git a/prelude/apple/apple_bundle.bzl b/prelude/apple/apple_bundle.bzl index b461efff02c..11a0372a31c 100644 --- a/prelude/apple/apple_bundle.bzl +++ b/prelude/apple/apple_bundle.bzl @@ -14,12 +14,20 @@ load( load("@prelude//:paths.bzl", "paths") load("@prelude//:validation_deps.bzl", "get_validation_deps_outputs") load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo", "AppleToolsInfo") +load("@prelude//apple:apple_xctest_frameworks_utility.bzl", "get_xctest_frameworks_bundle_parts") # @oss-disable: load("@prelude//apple/meta_only:linker_outputs.bzl", "subtargets_for_apple_bundle_extra_outputs") load("@prelude//apple/user:apple_selected_debug_path_file.bzl", "SELECTED_DEBUG_PATH_FILE_NAME") load("@prelude//apple/user:apple_selective_debugging.bzl", "AppleSelectiveDebuggingInfo") +load("@prelude//apple/validation:debug_artifacts.bzl", "get_debug_artifacts_validators") +load( + "@prelude//cxx:index_store.bzl", + "IndexStoreInfo", # @unused Used as a type + "create_index_store_subtargets_and_provider", +) load( "@prelude//ide_integrations:xcode.bzl", "XCODE_DATA_SUB_TARGET", + "XcodeDataInfoKeys", "generate_xcode_data", ) load( @@ -34,19 +42,14 @@ load( "make_link_command_debug_output_json_info", ) load("@prelude//utils:arglike.bzl", "ArgLike") -load("@prelude//utils:expect.bzl", "expect") load("@prelude//utils:lazy.bzl", "lazy") -load( - "@prelude//utils:set.bzl", - "set", -) load( "@prelude//utils:utils.bzl", "flatten", ) load(":apple_bundle_destination.bzl", "AppleBundleDestination") load(":apple_bundle_part.bzl", "AppleBundlePart", "SwiftStdlibArguments", 
"assemble_bundle", "bundle_output", "get_apple_bundle_part_relative_destination_path", "get_bundle_dir_name") -load(":apple_bundle_resources.bzl", "get_apple_bundle_resource_part_list", "get_is_watch_bundle") +load(":apple_bundle_resources.bzl", "get_apple_bundle_resource_part_list") load( ":apple_bundle_types.bzl", "AppleBinaryExtraOutputsInfo", @@ -57,11 +60,16 @@ load( "AppleBundleResourceInfo", "AppleBundleType", "AppleBundleTypeDefault", - "AppleBundleTypeWatchApp", ) load(":apple_bundle_utility.bzl", "get_bundle_min_target_version", "get_default_binary_dep", "get_flattened_binary_deps", "get_product_name") -load(":apple_dsym.bzl", "DSYM_INFO_SUBTARGET", "DSYM_SUBTARGET", "get_apple_dsym", "get_apple_dsym_ext", "get_apple_dsym_info") +load(":apple_code_signing_types.bzl", "CodeSignConfiguration") +load(":apple_dsym.bzl", "DSYM_INFO_SUBTARGET", "DSYM_SUBTARGET", "get_apple_dsym", "get_apple_dsym_ext", "get_apple_dsym_info_json") load(":apple_sdk.bzl", "get_apple_sdk_name") +load( + ":apple_sdk_metadata.bzl", + "MacOSXCatalystSdkMetadata", + "MacOSXSdkMetadata", +) load(":apple_universal_binaries.bzl", "create_universal_binary") load( ":debug.bzl", @@ -71,7 +79,6 @@ load( ) load(":xcode.bzl", "apple_xcode_data_add_xctoolchain") -INSTALL_DATA_SUB_TARGET = "install-data" _INSTALL_DATA_FILE_NAME = "install_apple_data.json" _PLIST = "plist" @@ -79,8 +86,7 @@ _PLIST = "plist" _XCTOOLCHAIN_SUB_TARGET = "xctoolchain" AppleBundleDebuggableInfo = record( - # Can be `None` for WatchKit stub - binary_info = field([AppleDebuggableInfo, None]), + binary_info = field(AppleDebuggableInfo), # Debugable info of all bundle deps dep_infos = field(list[AppleDebuggableInfo]), # Concat of `binary_info` and `dep_infos` @@ -100,13 +106,6 @@ AppleBundlePartListOutput = record( ) def _get_binary(ctx: AnalysisContext) -> AppleBundleBinaryOutput: - # No binary means we are building watchOS bundle. In v1 bundle binary is present, but its sources are empty. - if ctx.attrs.binary == None: - return AppleBundleBinaryOutput( - binary = _get_watch_kit_stub_artifact(ctx), - is_watchkit_stub_binary = True, - ) - if len(get_flattened_binary_deps(ctx.attrs.binary)) > 1: if ctx.attrs.selective_debugging != None: fail("Selective debugging is not supported for universal binaries.") @@ -127,9 +126,10 @@ def _get_binary(ctx: AnalysisContext) -> AppleBundleBinaryOutput: def _get_bundle_dsym_name(ctx: AnalysisContext) -> str: return paths.replace_extension(get_bundle_dir_name(ctx), ".dSYM") -def _scrub_binary(ctx, binary: Artifact, binary_execution_preference_info: None | LinkExecutionPreferenceInfo) -> Artifact: +def _scrub_binary(ctx, binary: Artifact, binary_execution_preference_info: None | LinkExecutionPreferenceInfo, focused_targets_labels: list[Label] = []) -> Artifact: # If fast adhoc code signing is enabled, we need to resign the binary as it won't be signed later. 
- if ctx.attrs._fast_adhoc_signing_enabled: + code_signing_configuration = CodeSignConfiguration(ctx.attrs._code_signing_configuration) + if code_signing_configuration == CodeSignConfiguration("fast-adhoc"): apple_tools = ctx.attrs._apple_tools[AppleToolsInfo] adhoc_codesign_tool = apple_tools.adhoc_codesign_tool else: @@ -137,7 +137,7 @@ def _scrub_binary(ctx, binary: Artifact, binary_execution_preference_info: None selective_debugging_info = ctx.attrs.selective_debugging[AppleSelectiveDebuggingInfo] preference = binary_execution_preference_info.preference if binary_execution_preference_info else LinkExecutionPreference("any") - return selective_debugging_info.scrub_binary(ctx, binary, preference, adhoc_codesign_tool) + return selective_debugging_info.scrub_binary(ctx, binary, preference, adhoc_codesign_tool, focused_targets_labels) def _maybe_scrub_binary(ctx, binary_dep: Dependency) -> AppleBundleBinaryOutput: binary = binary_dep[DefaultInfo].default_outputs[0] @@ -145,26 +145,30 @@ def _maybe_scrub_binary(ctx, binary_dep: Dependency) -> AppleBundleBinaryOutput: if ctx.attrs.selective_debugging == None: return AppleBundleBinaryOutput(binary = binary, debuggable_info = debuggable_info) - binary = _scrub_binary(ctx, binary, binary_dep.get(LinkExecutionPreferenceInfo)) - if not debuggable_info: - return AppleBundleBinaryOutput(binary = binary) + if debuggable_info: + # If we have debuggable info for this binary, create the scrubbed dsym for the binary and filter debug info. + debug_info_tset = debuggable_info.debug_info_tset - # If we have debuggable info for this binary, create the scrubed dsym for the binary and filter debug info. - debug_info_tset = debuggable_info.debug_info_tset - dsym_artifact = _get_scrubbed_binary_dsym(ctx, binary, debug_info_tset) + # The traversal is intentionally designed to be topological, allowing us to skip + # portions of the debug info that are not transitive in relation to the focused targets.
+ all_debug_info = debug_info_tset._tset.traverse(ordering = "topological") + selective_debugging_info = ctx.attrs.selective_debugging[AppleSelectiveDebuggingInfo] + filtered_debug_info = selective_debugging_info.filter(all_debug_info) - all_debug_info = debug_info_tset._tset.traverse() - selective_debugging_info = ctx.attrs.selective_debugging[AppleSelectiveDebuggingInfo] - filtered_debug_info = selective_debugging_info.filter(all_debug_info) + filtered_external_debug_info = make_artifact_tset( + actions = ctx.actions, + label = ctx.label, + artifacts = flatten(filtered_debug_info.map.values()), + ) - filtered_external_debug_info = make_artifact_tset( - actions = ctx.actions, - label = ctx.label, - artifacts = flatten(filtered_debug_info.map.values()), - ) - debuggable_info = AppleDebuggableInfo(dsyms = [dsym_artifact], debug_info_tset = filtered_external_debug_info, filtered_map = filtered_debug_info.map) + binary = _scrub_binary(ctx, binary, binary_dep.get(LinkExecutionPreferenceInfo), filtered_debug_info.swift_modules_labels) + dsym_artifact = _get_scrubbed_binary_dsym(ctx, binary, debug_info_tset) - return AppleBundleBinaryOutput(binary = binary, debuggable_info = debuggable_info) + debuggable_info = AppleDebuggableInfo(dsyms = [dsym_artifact], debug_info_tset = filtered_external_debug_info, filtered_map = filtered_debug_info.map) + return AppleBundleBinaryOutput(binary = binary, debuggable_info = debuggable_info) + else: + binary = _scrub_binary(ctx, binary, binary_dep.get(LinkExecutionPreferenceInfo)) + return AppleBundleBinaryOutput(binary = binary) def _get_scrubbed_binary_dsym(ctx, binary: Artifact, debug_info_tset: ArtifactTSet) -> Artifact: debug_info = project_artifacts( @@ -183,9 +187,6 @@ def _get_binary_bundle_parts(ctx: AnalysisContext, binary_output: AppleBundleBin """Returns a tuple of all binary bundle parts and the primary bundle binary.""" result = [] - if binary_output.is_watchkit_stub_binary: - # If we're using a stub binary from watchkit, we also need to add extra part for stub. - result.append(AppleBundlePart(source = binary_output.binary, destination = AppleBundleDestination("watchkitstub"), new_name = "WK")) primary_binary_part = AppleBundlePart(source = binary_output.binary, destination = AppleBundleDestination("executables"), new_name = get_product_name(ctx)) result.append(primary_binary_part) @@ -196,10 +197,6 @@ def _get_binary_bundle_parts(ctx: AnalysisContext, binary_output: AppleBundleBin return result, primary_binary_part def _get_dsym_input_binary_arg(ctx: AnalysisContext, primary_binary_path_arg: cmd_args) -> cmd_args: - # No binary means we are building watchOS bundle. In v1 bundle binary is present, but its sources are empty. 
- if ctx.attrs.binary == None: - return cmd_args(_get_watch_kit_stub_artifact(ctx)) - binary_dep = get_default_binary_dep(ctx.attrs.binary) default_binary = binary_dep[DefaultInfo].default_outputs[0] @@ -215,13 +212,6 @@ def _get_dsym_input_binary_arg(ctx: AnalysisContext, primary_binary_path_arg: cm else: return primary_binary_path_arg -def _get_watch_kit_stub_artifact(ctx: AnalysisContext) -> Artifact: - expect(ctx.attrs.binary == None, "Stub is useful only when binary is not set which means watchOS bundle is built.") - stub_binary = ctx.attrs._apple_toolchain[AppleToolchainInfo].watch_kit_stub_binary - if stub_binary == None: - fail("Expected Watch Kit stub binary to be provided when bundle binary is not set.") - return stub_binary - def _apple_bundle_run_validity_checks(ctx: AnalysisContext): if ctx.attrs.extension == None: fail("`extension` attribute is required") @@ -237,10 +227,6 @@ def _get_deps_debuggable_infos(ctx: AnalysisContext) -> list[AppleDebuggableInfo return deps_debuggable_infos def _get_bundle_binary_dsym_artifacts(ctx: AnalysisContext, binary_output: AppleBundleBinaryOutput, executable_arg: ArgLike) -> list[Artifact]: - # We don't care to process the watchkit stub binary. - if binary_output.is_watchkit_stub_binary: - return [] - if not ctx.attrs.split_arch_dsym: # Calling `dsymutil` on the correctly named binary in the _final bundle_ to yield dsym files # with naming convention compatible with Meta infra. @@ -260,15 +246,12 @@ def _get_bundle_binary_dsym_artifacts(ctx: AnalysisContext, binary_output: Apple return binary_output.debuggable_info.dsyms def _get_all_agg_debug_info(ctx: AnalysisContext, binary_output: AppleBundleBinaryOutput, deps_debuggable_infos: list[AppleDebuggableInfo]) -> AggregatedAppleDebugInfo: - all_debug_infos = deps_debuggable_infos - if not binary_output.is_watchkit_stub_binary: - binary_debuggable_info = binary_output.debuggable_info - all_debug_infos = all_debug_infos + [binary_debuggable_info] + all_debug_infos = deps_debuggable_infos + ([binary_output.debuggable_info] if binary_output.debuggable_info else []) return get_aggregated_debug_info(ctx, all_debug_infos) def _maybe_scrub_selected_debug_paths_file(ctx: AnalysisContext, package_names: list[str]) -> Artifact: if not ctx.attrs.selective_debugging: - return ctx.actions.write(SELECTED_DEBUG_PATH_FILE_NAME, sorted(set(package_names).list())) + return ctx.actions.write(SELECTED_DEBUG_PATH_FILE_NAME, sorted(set(package_names))) selective_debugging_info = ctx.attrs.selective_debugging[AppleSelectiveDebuggingInfo] return selective_debugging_info.scrub_selected_debug_paths_file(ctx, package_names, SELECTED_DEBUG_PATH_FILE_NAME)
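The `sorted(set(package_names))` change above belongs to the same migration seen in the Android hunks earlier in this diff: the prelude's `set.bzl` wrapper, with methods like `.size()` and `.list()`, gives way to the native Starlark `set`, which composes with `len()`, `list()`, `in`, and the `&`/`|` operators. A small sketch, with hypothetical input:

def _unique_sorted(package_names):
    # A native Starlark set is iterable, so sorted() accepts it directly.
    return sorted(set(package_names))

# _unique_sorted(["b", "a", "b"]) evaluates to ["a", "b"]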
@@ -292,18 +275,25 @@ def get_apple_bundle_part_list(ctx: AnalysisContext, params: AppleBundlePartList if resource_part_list == None: resource_part_list = get_apple_bundle_resource_part_list(ctx) + xctest_frameworks_parts = [] + if getattr(ctx.attrs, "embed_xctest_frameworks", False): + if getattr(ctx.attrs, "extension", "") == "app": + # XCTest frameworks should only be enabled for the top-level app, + # not for any other bundles in the dep graph + xctest_frameworks_parts = get_xctest_frameworks_bundle_parts( + ctx, + # It's not possible to pass information down the graph whether + # the `apple_test()` rdep needs Swift support, so just assume + # it does. In the future, Obj-C-only test targets will be rare. + swift_support_needed = True, + ) + return AppleBundlePartListOutput( - parts = resource_part_list.resource_parts + params.binaries, + parts = resource_part_list.resource_parts + params.binaries + xctest_frameworks_parts, info_plist_part = resource_part_list.info_plist_part, ) def _infer_apple_bundle_type(ctx: AnalysisContext) -> AppleBundleType: - is_watchos = get_is_watch_bundle(ctx) - if is_watchos and ctx.attrs.bundle_type: - fail("Cannot have a watchOS app with an explicit `bundle_type`, target: {}".format(ctx.label)) - - if is_watchos: - return AppleBundleTypeWatchApp if ctx.attrs.bundle_type != None: return AppleBundleType(ctx.attrs.bundle_type) @@ -325,18 +315,26 @@ def apple_bundle_impl(ctx: AnalysisContext) -> list[Provider]: primary_binary_rel_path = get_apple_bundle_part_relative_destination_path(ctx, primary_binary_part) validation_deps_outputs = get_validation_deps_outputs(ctx) - sub_targets = assemble_bundle( + + should_enable_incremental_bundling = True + sdk_name = get_apple_sdk_name(ctx) + if sdk_name == MacOSXSdkMetadata.name or sdk_name == MacOSXCatalystSdkMetadata.name: + should_enable_incremental_bundling = False + + bundle_result = assemble_bundle( ctx, bundle, apple_bundle_part_list_output.parts, apple_bundle_part_list_output.info_plist_part, SwiftStdlibArguments(primary_binary_rel_path = primary_binary_rel_path), validation_deps_outputs, + incremental_bundling_override = should_enable_incremental_bundling, ) + sub_targets = bundle_result.sub_targets sub_targets.update(aggregated_debug_info.sub_targets) primary_binary_path = cmd_args([bundle, primary_binary_rel_path], delimiter = "/") - primary_binary_path_arg = cmd_args(primary_binary_path).hidden(bundle) + primary_binary_path_arg = cmd_args(primary_binary_path, hidden = bundle) linker_maps_directory, linker_map_info = _linker_maps_data(ctx) sub_targets["linker-maps"] = [DefaultInfo(default_output = linker_maps_directory)] @@ -353,7 +351,8 @@ def apple_bundle_impl(ctx: AnalysisContext) -> list[Provider]: if dsym_artifacts: sub_targets[DSYM_SUBTARGET] = [DefaultInfo(default_outputs = dsym_artifacts)] - dsym_info = get_apple_dsym_info(ctx, binary_dsyms = binary_dsym_artifacts, dep_dsyms = dep_dsym_artifacts) + dsym_info_json = get_apple_dsym_info_json(binary_dsym_artifacts, dep_dsym_artifacts) + dsym_info = ctx.actions.write_json("dsym-info.json", dsym_info_json, pretty = True) sub_targets[DSYM_INFO_SUBTARGET] = [ DefaultInfo(default_output = dsym_info, other_outputs = dsym_artifacts), ] @@ -363,10 +362,10 @@ def apple_bundle_impl(ctx: AnalysisContext) -> list[Provider]: sub_targets[_XCTOOLCHAIN_SUB_TARGET] = ctx.attrs._apple_xctoolchain.providers # Define the xcode data sub target - xcode_data_default_info, xcode_data_info = generate_xcode_data(ctx, "apple_bundle", bundle, _xcode_populate_attributes, processed_info_plist = apple_bundle_part_list_output.info_plist_part.source) + plist_bundle_relative_path = get_apple_bundle_part_relative_destination_path(ctx, apple_bundle_part_list_output.info_plist_part) + xcode_data_default_info, xcode_data_info = generate_xcode_data(ctx, "apple_bundle", bundle, _xcode_populate_attributes, processed_info_plist = apple_bundle_part_list_output.info_plist_part.source, info_plist_relative_path = plist_bundle_relative_path) sub_targets[XCODE_DATA_SUB_TARGET] = xcode_data_default_info - plist_bundle_relative_path = get_apple_bundle_part_relative_destination_path(ctx, apple_bundle_part_list_output.info_plist_part) install_data = generate_install_data(ctx, plist_bundle_relative_path) # 
Collect extra bundle outputs @@ -374,6 +373,28 @@ def apple_bundle_impl(ctx: AnalysisContext) -> list[Provider]: # @oss-disable: extra_output_subtargets = subtargets_for_apple_bundle_extra_outputs(ctx, extra_output_provider) # @oss-disable: sub_targets.update(extra_output_subtargets) + # index store + index_store_subtargets, index_store_info = _index_store_data(ctx) + sub_targets.update(index_store_subtargets) + + bundle_and_dsym_info_json = { + "bundle": bundle, + "dsym": dsym_info_json, + } + bundle_and_dsym_info = ctx.actions.write_json("bundle-and-dsym-info.json", bundle_and_dsym_info_json) + sub_targets["bundle-and-dsym-info"] = [ + DefaultInfo( + default_output = bundle_and_dsym_info, + other_outputs = [bundle] + dsym_artifacts, + ), + ] + + (validation_providers, validation_subtargets) = _get_debug_validators_subtargets_and_providers( + ctx, + aggregated_debug_info.debug_info.debug_info_tset, + ) + sub_targets.update(validation_subtargets) + return [ DefaultInfo(default_output = bundle, sub_targets = sub_targets), AppleBundleInfo( @@ -400,20 +421,52 @@ def apple_bundle_impl(ctx: AnalysisContext) -> list[Provider]: xcode_data_info, extra_output_provider, link_cmd_debug_info, - ] + index_store_info, + ] + bundle_result.providers + validation_providers -def _xcode_populate_attributes(ctx, processed_info_plist: Artifact) -> dict[str, typing.Any]: +def _xcode_populate_attributes(ctx, processed_info_plist: Artifact, info_plist_relative_path: str) -> dict[str, typing.Any]: data = { - "deployment_version": get_bundle_min_target_version(ctx, get_default_binary_dep(ctx.attrs.binary)), - "info_plist": ctx.attrs.info_plist, - "processed_info_plist": processed_info_plist, - "product_name": get_product_name(ctx), - "sdk": get_apple_sdk_name(ctx), + XcodeDataInfoKeys.DEPLOYMENT_VERSION: get_bundle_min_target_version(ctx, get_default_binary_dep(ctx.attrs.binary)), + XcodeDataInfoKeys.INFO_PLIST: ctx.attrs.info_plist, + XcodeDataInfoKeys.PROCESSED_INFO_PLIST: processed_info_plist, + XcodeDataInfoKeys.INFO_PLIST_RELATIVE_PATH: info_plist_relative_path, + XcodeDataInfoKeys.PRODUCT_NAME: get_product_name(ctx), + XcodeDataInfoKeys.SDK: get_apple_sdk_name(ctx), } apple_xcode_data_add_xctoolchain(ctx, data) return data +def _get_debug_validators_subtargets_and_providers(ctx, artifacts: ArtifactTSet) -> (list[Provider], dict[str, list[Provider]]): + name_to_debug_validator_artifact = get_debug_artifacts_validators(ctx, artifacts) + if not name_to_debug_validator_artifact: + return ([], {}) + + return ( + [ + ValidationInfo( + validations = [ + ValidationSpec( + name = name, + validation_result = artifact, + ) + for name, artifact in name_to_debug_validator_artifact.items() + ], + ), + ], + { + "debug-artifacts-validators": [ + DefaultInfo( + default_outputs = name_to_debug_validator_artifact.values(), + sub_targets = { + name: [DefaultInfo(default_output = artifact)] + for name, artifact in name_to_debug_validator_artifact.items() + }, + ), + ], + }, + ) + def _linker_maps_data(ctx: AnalysisContext) -> (Artifact, AppleBundleLinkerMapInfo): deps_with_binary = ctx.attrs.deps + get_flattened_binary_deps(ctx.attrs.binary) deps_linker_map_infos = filter( @@ -439,6 +492,11 @@ def _link_command_debug_data(ctx: AnalysisContext) -> (Artifact, LinkCommandDebu link_cmd_debug_output_file = make_link_command_debug_output_json_info(ctx, all_debug_infos) return link_cmd_debug_output_file, LinkCommandDebugOutputInfo(debug_outputs = all_debug_infos) +def _index_store_data(ctx: AnalysisContext) -> (dict[str, 
list[Provider]], IndexStoreInfo): + deps_with_binary = ctx.attrs.deps + get_flattened_binary_deps(ctx.attrs.binary) + index_store_subtargets, index_store_info = create_index_store_subtargets_and_provider(ctx, [], deps_with_binary) + return index_store_subtargets, index_store_info + def _extra_output_provider(ctx: AnalysisContext) -> AppleBundleExtraOutputsInfo: # Collect the sub_targets for this bundle's binary that are extra_linker_outputs. extra_outputs = [] diff --git a/prelude/apple/apple_bundle_attrs.bzl b/prelude/apple/apple_bundle_attrs.bzl index 26048160c9a..1595af2b5ff 100644 --- a/prelude/apple/apple_bundle_attrs.bzl +++ b/prelude/apple/apple_bundle_attrs.bzl @@ -5,8 +5,57 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -def get_apple_info_plist_build_system_identification_attrs(): +load("@prelude//apple:apple_platforms.bzl", "APPLE_PLATFORMS_KEY") +load("@prelude//apple:apple_rules_impl_utility.bzl", "apple_bundle_extra_attrs") +load("@prelude//apple:resource_groups.bzl", "RESOURCE_GROUP_MAP_ATTR") +load("@prelude//decls/ios_rules.bzl", "AppleBundleExtension") + +def _apple_bundle_base_attrs(): return { - "info_plist_identify_build_system": attrs.option(attrs.bool(), default = None), - "_info_plist_identify_build_system_default": attrs.bool(default = False), + # Attributes come from `attributes.bzl` but since it's autogenerated, we cannot easily abstract + "asset_catalogs_compilation_options": attrs.dict(key = attrs.string(), value = attrs.any(), default = {}), + "codesign_flags": attrs.list(attrs.string(), default = []), + "codesign_identity": attrs.option(attrs.string(), default = None), + "contacts": attrs.list(attrs.string(), default = []), + "default_host_platform": attrs.option(attrs.configuration_label(), default = None), + "default_platform": attrs.option(attrs.string(), default = None), + "deps": attrs.list(attrs.dep(), default = []), + "extension": attrs.one_of(attrs.enum(AppleBundleExtension), attrs.string()), + "ibtool_flags": attrs.option(attrs.list(attrs.string()), default = None), + "ibtool_module_flag": attrs.option(attrs.bool(), default = None), + "incremental_bundling_enabled": attrs.option(attrs.bool(), default = None), + "info_plist": attrs.source(), + "info_plist_substitutions": attrs.dict(key = attrs.string(), value = attrs.string(), sorted = False, default = {}), + "labels": attrs.list(attrs.string(), default = []), + "licenses": attrs.list(attrs.source(), default = []), + "platform_binary": attrs.option(attrs.list(attrs.tuple(attrs.regex(), attrs.dep())), default = None), + "product_name": attrs.option(attrs.string(), default = None), + "resource_group": attrs.option(attrs.string(), default = None), + "resource_group_map": attrs.option(RESOURCE_GROUP_MAP_ATTR, default = None), + "skip_copying_swift_stdlib": attrs.option(attrs.bool(), default = None), + "try_skip_code_signing": attrs.option(attrs.bool(), default = None), + "xcode_product_type": attrs.option(attrs.string(), default = None), } + +def _apple_bundle_default_attrs(): + attributes = {} + attributes.update(_apple_bundle_base_attrs()) + attributes.update(apple_bundle_extra_attrs()) + attributes.update({ + APPLE_PLATFORMS_KEY: attrs.dict(key = attrs.string(), value = attrs.dep(), sorted = False, default = {}), + }) + return attributes + +def apple_watchos_bundle_attrs(): + attributes = _apple_bundle_default_attrs() + attributes.update({ + "bundle_type": attrs.string(default = "watchapp"), + }) + return attributes + +def apple_macos_bundle_attrs(): + attributes = _apple_bundle_default_attrs() + attributes.update({ + "bundle_type": attrs.string(default = "default"), + }) + return attributes
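The builders above layer a base attribute dict, shared extras, and a per-flavor `bundle_type` override. A hedged sketch of how such a builder is typically consumed when registering a rule; the `rule(...)` wiring below is an assumption for illustration and is not part of this diff:

# Hypothetical registration; the real one lives elsewhere in the prelude.
apple_watchos_bundle = rule(
    impl = apple_bundle_impl,
    attrs = apple_watchos_bundle_attrs(),
)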
diff --git a/prelude/apple/apple_bundle_config.bzl b/prelude/apple/apple_bundle_config.bzl index 47376252fa6..002dc35399b 100644 --- a/prelude/apple/apple_bundle_config.bzl +++ b/prelude/apple/apple_bundle_config.bzl @@ -5,24 +5,38 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +load(":apple_code_signing_types.bzl", "CodeSignConfiguration") + def _maybe_get_bool(config: str, default: [None, bool]) -> [None, bool]: result = read_root_config("apple", config, None) if result == None: return default return result.lower() == "true" +def _get_code_signing_configuration() -> str: + is_dry_run = _maybe_get_bool("dry_run_code_signing", False) + + # This is a kill switch for the feature; it can also be disabled by setting + # `apple.fast_adhoc_signing_enabled=false` in a global buckconfig file. + is_fast_adhoc_signing_enabled = _maybe_get_bool("fast_adhoc_signing_enabled", True) + + if is_dry_run: + return CodeSignConfiguration("dry-run").value + elif is_fast_adhoc_signing_enabled: + return CodeSignConfiguration("fast-adhoc").value + else: + return CodeSignConfiguration("none").value + def apple_bundle_config() -> dict[str, typing.Any]: return { "_bundling_cache_buster": read_root_config("apple", "bundling_cache_buster", None), "_bundling_log_file_enabled": _maybe_get_bool("bundling_log_file_enabled", True), "_bundling_log_file_level": read_root_config("apple", "bundling_log_file_level", None), + "_code_signing_configuration": _get_code_signing_configuration(), + "_codesign_identities_command_override": read_root_config("apple", "codesign_identities_command_override", None), "_codesign_type": read_root_config("apple", "codesign_type_override", None), "_compile_resources_locally_override": _maybe_get_bool("compile_resources_locally_override", None), - "_dry_run_code_signing": _maybe_get_bool("dry_run_code_signing", False), "_embed_provisioning_profile_when_adhoc_code_signing": _maybe_get_bool("embed_provisioning_profile_when_adhoc_code_signing", None), - # This is a kill switch for the feature, it can also be disabled by setting - # `apple.fast_adhoc_signing_enabled=false` in a global buckconfig file.
- "_fast_adhoc_signing_enabled": _maybe_get_bool("fast_adhoc_signing_enabled", True), "_fast_provisioning_profile_parsing_enabled": _maybe_get_bool("fast_provisioning_profile_parsing_enabled", False), "_incremental_bundling_enabled": _maybe_get_bool("incremental_bundling_enabled", True), "_info_plist_identify_build_system_default": _maybe_get_bool("info_plist_identify_build_system", True), diff --git a/prelude/apple/apple_bundle_destination.bzl b/prelude/apple/apple_bundle_destination.bzl index 2d54b273d1a..172ddb9150d 100644 --- a/prelude/apple/apple_bundle_destination.bzl +++ b/prelude/apple/apple_bundle_destination.bzl @@ -22,10 +22,10 @@ AppleBundleDestination = enum( "headers", "modules", "quicklook", - "watchkitstub", "bundleroot", "loginitems", "appclips", + "extensionkit_extensions", ) AppleBundleDestinationPaths = record( @@ -39,10 +39,10 @@ AppleBundleDestinationPaths = record( headers = field(str, ""), modules = field(str, ""), quicklook = field(str, ""), - watchkitstub = field(str, ""), bundleroot = field(str, ""), loginitems = field(str, ""), appclips = field(str, ""), + extensionkit_extensions = field(str, ""), ) _IOSBundleDestinationPaths = AppleBundleDestinationPaths( @@ -51,8 +51,8 @@ _IOSBundleDestinationPaths = AppleBundleDestinationPaths( xpcservices = "XPCServices", watchapp = "Watch", quicklook = "Library/QuickLook", - watchkitstub = "_WatchKitStub", appclips = "AppClips", + extensionkit_extensions = "Extensions", ) _IOSFrameworkBundleDestinationPaths = AppleBundleDestinationPaths( @@ -74,7 +74,6 @@ _MacOSBundleDestinationPaths = AppleBundleDestinationPaths( headers = macOS_content_path, modules = macOS_content_path, quicklook = paths.join(macOS_content_path, "Library/QuickLook"), - watchkitstub = macOS_content_path, bundleroot = macOS_content_path, loginitems = paths.join(macOS_content_path, "Library/LoginItems"), ) @@ -82,27 +81,43 @@ _MacOSBundleDestinationPaths = AppleBundleDestinationPaths( _MacOSFrameworkBundleDestinationPaths = AppleBundleDestinationPaths( resources = "Resources", frameworks = "Frameworks", + plugins = "PlugIns", xpcservices = "XPCServices", metadata = "Resources", headers = "Headers", modules = "Modules", ) +macOS_versioned_path = "Versions/A" +_MacOSVersionedFrameworkBundleDestinationPaths = AppleBundleDestinationPaths( + resources = paths.join(macOS_versioned_path, "Resources"), + frameworks = paths.join(macOS_versioned_path, "Frameworks"), + plugins = paths.join(macOS_versioned_path, "PlugIns"), + xpcservices = paths.join(macOS_versioned_path, "XPCServices"), + metadata = paths.join(macOS_versioned_path, "Resources"), + headers = paths.join(macOS_versioned_path, "Headers"), + modules = paths.join(macOS_versioned_path, "Modules"), + executables = macOS_versioned_path, +) + def _get_apple_bundle_destinations_for_sdk_name(name: str) -> AppleBundleDestinationPaths: if name == "macosx" or name == "maccatalyst": return _MacOSBundleDestinationPaths else: return _IOSBundleDestinationPaths -def _get_apple_framework_bundle_destinations_for_sdk_name(name: str) -> AppleBundleDestinationPaths: +def _get_apple_framework_bundle_destinations_for_sdk_name(name: str, versioned_macos_bundle: bool) -> AppleBundleDestinationPaths: if name == "macosx" or name == "maccatalyst": - return _MacOSFrameworkBundleDestinationPaths + if versioned_macos_bundle: + return _MacOSVersionedFrameworkBundleDestinationPaths + else: + return _MacOSFrameworkBundleDestinationPaths else: return _IOSFrameworkBundleDestinationPaths -def bundle_relative_path_for_destination(destination: 
AppleBundleDestination, sdk_name: str, extension: str, versioned_macos_bundle: bool) -> str: if extension == "framework": - bundle_destinations = _get_apple_framework_bundle_destinations_for_sdk_name(sdk_name) + bundle_destinations = _get_apple_framework_bundle_destinations_for_sdk_name(sdk_name, versioned_macos_bundle) else: bundle_destinations = _get_apple_bundle_destinations_for_sdk_name(sdk_name) @@ -112,6 +127,8 @@ def bundle_relative_path_for_destination(destination: AppleBundleDestination, sd return bundle_destinations.frameworks elif destination.value == "executables": return bundle_destinations.executables + elif destination.value == "extensionkit_extensions": + return bundle_destinations.extensionkit_extensions elif destination.value == "plugins": return bundle_destinations.plugins elif destination.value == "xpcservices": @@ -126,8 +143,6 @@ def bundle_relative_path_for_destination(destination: AppleBundleDestination, sd return bundle_destinations.modules elif destination.value == "quicklook": return bundle_destinations.quicklook - elif destination.value == "watchkitstub": - return bundle_destinations.watchkitstub elif destination.value == "bundleroot": return bundle_destinations.bundleroot elif destination.value == "loginitems":
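With `versioned_macos_bundle` threaded through, macOS framework bundles can opt into the `Versions/A` layout. A worked example of the mapping, with expected results inferred from `_MacOSVersionedFrameworkBundleDestinationPaths` above:

# macOS framework, versioned layout:
bundle_relative_path_for_destination(AppleBundleDestination("resources"), "macosx", "framework", True)
# -> "Versions/A/Resources"

# The same call with versioned_macos_bundle = False keeps the flat layout:
bundle_relative_path_for_destination(AppleBundleDestination("resources"), "macosx", "framework", False)
# -> "Resources"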
diff --git a/prelude/apple/apple_bundle_part.bzl b/prelude/apple/apple_bundle_part.bzl index 9ad42106cf3..d9b5b5e14ea 100644 --- a/prelude/apple/apple_bundle_part.bzl +++ b/prelude/apple/apple_bundle_part.bzl @@ -7,10 +7,13 @@ load("@prelude//:paths.bzl", "paths") load("@prelude//utils:expect.bzl", "expect") +load("@prelude//utils:utils.bzl", "value_or") load(":apple_bundle_destination.bzl", "AppleBundleDestination", "bundle_relative_path_for_destination") +load(":apple_bundle_types.bzl", "AppleBundleManifest", "AppleBundleManifestInfo", "AppleBundleManifestLogFiles") load(":apple_bundle_utility.bzl", "get_extension_attr", "get_product_name") -load(":apple_code_signing_types.bzl", "CodeSignType") -load(":apple_entitlements.bzl", "get_entitlements_codesign_args") +load(":apple_code_signing_types.bzl", "CodeSignConfiguration", "CodeSignType") +load(":apple_entitlements.bzl", "get_entitlements_codesign_args", "should_include_entitlements") +load(":apple_error_handler.bzl", "apple_build_error_handler") load(":apple_sdk.bzl", "get_apple_sdk_name") load(":apple_sdk_metadata.bzl", "get_apple_sdk_metadata_for_sdk_name") load(":apple_swift_stdlib.bzl", "should_copy_swift_stdlib") @@ -30,12 +33,21 @@ AppleBundlePart = record( new_name = field([str, None], None), # Marks parts which should be code signed separately from the whole bundle. codesign_on_copy = field(bool, False), + # Entitlements to use when this part is code signed separately. + codesign_entitlements = field(Artifact | None, None), + # If present, override the codesign flags with these flags when this part is code signed separately. + codesign_flags_override = field([list[str], None], None), ) SwiftStdlibArguments = record( primary_binary_rel_path = field(str), ) +AppleBundleConstructionResult = record( + providers = field(list[Provider]), + sub_targets = field(dict[str, list[Provider]]), +) + def bundle_output(ctx: AnalysisContext) -> Artifact: bundle_dir_name = get_bundle_dir_name(ctx) output = ctx.actions.declare_output(bundle_dir_name) return output def assemble_bundle( @@ -48,27 +60,34 @@ def assemble_bundle( info_plist_part: [AppleBundlePart, None], swift_stdlib_args: [SwiftStdlibArguments, None], extra_hidden: list[Artifact] = [], - skip_adhoc_signing: bool = False) -> dict[str, list[Provider]]: + skip_adhoc_signing: bool = False, + incremental_bundling_override = None) -> AppleBundleConstructionResult: """ Returns extra subtargets related to bundling. """ all_parts = parts + [info_plist_part] if info_plist_part else [] - spec_file = _bundle_spec_json(ctx, all_parts) + codesign_type = _detect_codesign_type(ctx, skip_adhoc_signing) + spec_file = _bundle_spec_json(ctx, all_parts, codesign_type) tools = ctx.attrs._apple_tools[AppleToolsInfo] tool = tools.assemble_bundle codesign_args = [] - codesign_type = _detect_codesign_type(ctx, skip_adhoc_signing) codesign_tool = ctx.attrs._apple_toolchain[AppleToolchainInfo].codesign - if ctx.attrs._dry_run_code_signing: + code_signing_configuration = CodeSignConfiguration(ctx.attrs._code_signing_configuration) + if code_signing_configuration == CodeSignConfiguration("dry-run"): codesign_configuration_args = ["--codesign-configuration", "dry-run"] codesign_tool = tools.dry_codesign_tool - elif ctx.attrs._fast_adhoc_signing_enabled: - codesign_configuration_args = ["--codesign-configuration", "fast-adhoc"] - else: + elif code_signing_configuration == CodeSignConfiguration("fast-adhoc"): + if _get_fast_adhoc_signing_enabled(ctx): + codesign_configuration_args = ["--codesign-configuration", "fast-adhoc"] + else: + codesign_configuration_args = [] + elif code_signing_configuration == CodeSignConfiguration("none"): codesign_configuration_args = [] + else: + fail("Code signing configuration `{}` not supported".format(code_signing_configuration)) codesign_required = codesign_type.value in ["distribution", "adhoc"] swift_support_required = swift_stdlib_args and (not ctx.attrs.skip_copying_swift_stdlib) and should_copy_swift_stdlib(bundle.extension) @@ -84,11 +103,13 @@ def assemble_bundle( "--binary-destination", swift_stdlib_args.primary_binary_rel_path, "--frameworks-destination", - bundle_relative_path_for_destination(AppleBundleDestination("frameworks"), sdk_name, ctx.attrs.extension), + bundle_relative_path_for_destination(AppleBundleDestination("frameworks"), sdk_name, ctx.attrs.extension, ctx.attrs.versioned_macos_bundle), "--plugins-destination", - bundle_relative_path_for_destination(AppleBundleDestination("plugins"), sdk_name, ctx.attrs.extension), + bundle_relative_path_for_destination(AppleBundleDestination("plugins"), sdk_name, ctx.attrs.extension, ctx.attrs.versioned_macos_bundle), + "--extensionkit-extensions-destination", + bundle_relative_path_for_destination(AppleBundleDestination("extensionkit_extensions"), sdk_name, ctx.attrs.extension, ctx.attrs.versioned_macos_bundle), "--appclips-destination", - bundle_relative_path_for_destination(AppleBundleDestination("appclips"), sdk_name, ctx.attrs.extension), + bundle_relative_path_for_destination(AppleBundleDestination("appclips"), sdk_name, ctx.attrs.extension, ctx.attrs.versioned_macos_bundle), "--swift-stdlib-command",
cmd_args(ctx.attrs._apple_toolchain[AppleToolchainInfo].swift_toolchain_info.swift_stdlib_tool, delimiter = " ", quote = "shell"), "--sdk-root", @@ -115,6 +136,8 @@ def assemble_bundle( codesign_args.extend(provisioning_profiles_args) identities_command = ctx.attrs._apple_toolchain[AppleToolchainInfo].codesign_identities_command + if ctx.attrs._codesign_identities_command_override: + identities_command = ctx.attrs._codesign_identities_command_override[RunInfo] identities_command_args = ["--codesign-identities-command", cmd_args(identities_command)] if identities_command else [] codesign_args.extend(identities_command_args) @@ -135,24 +158,42 @@ def assemble_bundle( get_apple_bundle_part_relative_destination_path(ctx, info_plist_part), ] if info_plist_part else [] codesign_args.extend(info_plist_args) + + if ctx.attrs.provisioning_profile_filter: + codesign_args.extend([ + "--provisioning-profile-filter", + ctx.attrs.provisioning_profile_filter, + ]) + + strict_provisioning_profile_search = value_or(ctx.attrs.strict_provisioning_profile_search, ctx.attrs._strict_provisioning_profile_search_default) + if strict_provisioning_profile_search: + codesign_args.append("--strict-provisioning-profile-search") elif codesign_type.value == "skip": pass else: fail("Code sign type `{}` not supported".format(codesign_type)) - command = cmd_args([ - tool, - "--output", - bundle.as_output(), - "--spec", - spec_file, - ] + codesign_args + platform_args + swift_args) - command.hidden([part.source for part in all_parts]) + command = cmd_args( + [ + tool, + "--output", + bundle.as_output(), + "--spec", + spec_file, + ] + codesign_args + platform_args + swift_args, + hidden = + [part.source for part in all_parts] + + [part.codesign_entitlements for part in all_parts if part.codesign_entitlements] + + # Ensures any genrule deps get built; such targets are used for validation + extra_hidden, + ) run_incremental_args = {} incremental_state = ctx.actions.declare_output("incremental_state.json").as_output() # Fallback to value from buckconfig incremental_bundling_enabled = ctx.attrs.incremental_bundling_enabled or ctx.attrs._incremental_bundling_enabled + if incremental_bundling_override != None: + incremental_bundling_enabled = incremental_bundling_override if incremental_bundling_enabled: command.add("--incremental-state", incremental_state)
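The rewritten `command = cmd_args(..., hidden = ...)` above is the cross-cutting migration this diff applies throughout the prelude: the mutating `.hidden(...)` call on an existing `cmd_args` is replaced by the `hidden` argument of the `cmd_args` constructor, or, when the command object already exists, by `command.add(cmd_args(hidden = ...))` as in the next hunk. A minimal sketch of the pattern, with hypothetical `tool` and `dep` artifacts:

def _wrap_script_cmd(tool, dep):
    # `hidden` keeps `dep` as an input of the action without rendering it in argv.
    return cmd_args(["/bin/sh", tool], hidden = [dep])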
@@ -166,7 +207,7 @@ # overwrite file with incremental state so if previous and next builds are incremental # (as opposed to the current non-incremental one), next one won't assume there is a # valid incremental state. - command.hidden(ctx.actions.write_json(incremental_state, {})) + command.add(cmd_args(hidden = ctx.actions.write_json(incremental_state, {}))) category = "apple_assemble_bundle" if ctx.attrs._profile_bundling_enabled: @@ -177,6 +218,7 @@ def assemble_bundle( command.add("--fast-provisioning-profile-parsing") subtargets = {} + bundling_log_output = None if ctx.attrs._bundling_log_file_enabled: bundling_log_output = ctx.actions.declare_output("bundling_log.txt") command.add("--log-file", bundling_log_output.as_output()) @@ -185,14 +227,36 @@ def assemble_bundle( subtargets["bundling-log"] = [DefaultInfo(default_output = bundling_log_output)] command.add("--check-conflicts") + if ctx.attrs.versioned_macos_bundle: + command.add("--versioned-if-macos") command.add(codesign_configuration_args) - # Ensures any genrule deps get built, such targets are used for validation - command.hidden(extra_hidden) - command_json = ctx.actions.declare_output("bundling_command.json") - ctx.actions.write_json(command_json, command, with_inputs = True, pretty = True) - subtargets["command"] = [DefaultInfo(default_output = command_json)] + command_json_cmd_args = ctx.actions.write_json(command_json, command, with_inputs = True, pretty = True) + subtargets["command"] = [DefaultInfo(default_output = command_json, other_outputs = [command_json_cmd_args])] + + bundle_manifest_log_file_map = { + ctx.label: AppleBundleManifestLogFiles( + command_file = command_json, + spec_file = spec_file, + log_file = bundling_log_output, + ), + } + + if hasattr(ctx.attrs, "deps"): + for dep in ctx.attrs.deps: + dep_manifest_info = dep.get(AppleBundleManifestInfo) + if dep_manifest_info: + bundle_manifest_log_file_map.update(dep_manifest_info.manifest.log_file_map) + + bundle_manifest = AppleBundleManifest(log_file_map = bundle_manifest_log_file_map) + bundle_manifest_json_object = _convert_bundle_manifest_to_json_object(bundle_manifest) + + bundle_manifest_json_file = ctx.actions.declare_output("bundle_manifest.json") + bundle_manifest_cmd_args = ctx.actions.write_json(bundle_manifest_json_file, bundle_manifest_json_object, with_inputs = True, pretty = True) + subtargets["manifest"] = [DefaultInfo(default_output = bundle_manifest_json_file, other_outputs = [bundle_manifest_cmd_args])] + + providers = [AppleBundleManifestInfo(manifest = bundle_manifest)] env = {} cache_buster = ctx.attrs._bundling_cache_buster @@ -206,21 +270,23 @@ def assemble_bundle( prefer_local = not force_local_bundling, category = category, env = env, + error_handler = apple_build_error_handler, **run_incremental_args ) - return subtargets + return AppleBundleConstructionResult(sub_targets = subtargets, providers = providers) def get_bundle_dir_name(ctx: AnalysisContext) -> str: return paths.replace_extension(get_product_name(ctx), "." + get_extension_attr(ctx)) def get_apple_bundle_part_relative_destination_path(ctx: AnalysisContext, part: AppleBundlePart) -> str: - bundle_relative_path = bundle_relative_path_for_destination(part.destination, get_apple_sdk_name(ctx), ctx.attrs.extension) + bundle_relative_path = bundle_relative_path_for_destination(part.destination, get_apple_sdk_name(ctx), ctx.attrs.extension, ctx.attrs.versioned_macos_bundle) destination_file_or_directory_name = part.new_name if part.new_name != None else paths.basename(part.source.short_path) return paths.join(bundle_relative_path, destination_file_or_directory_name) # Returns JSON to be passed into bundle assembling tool.
It should contain a dictionary which maps bundle relative destination paths to source paths." -def _bundle_spec_json(ctx: AnalysisContext, parts: list[AppleBundlePart]) -> Artifact: +def _bundle_spec_json(ctx: AnalysisContext, parts: list[AppleBundlePart], codesign_type: CodeSignType) -> Artifact: specs = [] + include_entitlements = should_include_entitlements(ctx, codesign_type) for part in parts: part_spec = { @@ -229,9 +295,13 @@ def _bundle_spec_json(ctx: AnalysisContext, parts: list[AppleBundlePart]) -> Art } if part.codesign_on_copy: part_spec["codesign_on_copy"] = True + if include_entitlements and part.codesign_entitlements: + part_spec["codesign_entitlements"] = part.codesign_entitlements + if part.codesign_flags_override: + part_spec["codesign_flags_override"] = part.codesign_flags_override specs.append(part_spec) - return ctx.actions.write_json("bundle_spec.json", specs) + return ctx.actions.write_json("bundle_spec.json", specs, pretty = True) def _get_codesign_type_from_attribs(ctx: AnalysisContext) -> [CodeSignType, None]: # Target-level attribute takes highest priority @@ -245,7 +315,7 @@ def _get_codesign_type_from_attribs(ctx: AnalysisContext) -> [CodeSignType, None def _detect_codesign_type(ctx: AnalysisContext, skip_adhoc_signing: bool) -> CodeSignType: def compute_codesign_type(): - if ctx.attrs.extension not in ["app", "appex", "xctest"]: + if ctx.attrs.extension not in ["app", "appex", "xctest", "driver"]: # Only code sign application bundles, extensions and test bundles return CodeSignType("skip") @@ -278,3 +348,19 @@ def _should_embed_provisioning_profile(ctx: AnalysisContext, codesign_type: Code return ctx.attrs.embed_provisioning_profile_when_adhoc_code_signing return False + +def _convert_bundle_manifest_to_json_object(manifest: AppleBundleManifest) -> dict[Label, typing.Any]: + manifest_dict = {} + for target_label, logs in manifest.log_file_map.items(): + manifest_dict[target_label] = { + "command": logs.command_file, + "log": logs.log_file, + "spec": logs.spec_file, + } + return manifest_dict + +def _get_fast_adhoc_signing_enabled(ctx: AnalysisContext) -> bool: + fast_adhoc_signing_enabled = ctx.attrs.fast_adhoc_signing_enabled + if fast_adhoc_signing_enabled != None: + return fast_adhoc_signing_enabled + return ctx.attrs._fast_adhoc_signing_enabled_default diff --git a/prelude/apple/apple_bundle_resources.bzl b/prelude/apple/apple_bundle_resources.bzl index 8f84a915923..237e32ecb46 100644 --- a/prelude/apple/apple_bundle_resources.bzl +++ b/prelude/apple/apple_bundle_resources.bzl @@ -23,14 +23,16 @@ load( ) load(":apple_bundle_destination.bzl", "AppleBundleDestination") load(":apple_bundle_part.bzl", "AppleBundlePart") -load(":apple_bundle_types.bzl", "AppleBundleInfo", "AppleBundleTypeAppClip", "AppleBundleTypeDefault", "AppleBundleTypeWatchApp") -load(":apple_bundle_utility.bzl", "get_bundle_resource_processing_options", "get_extension_attr", "get_product_name") +load(":apple_bundle_types.bzl", "AppleBundleInfo", "AppleBundleTypeAppClip", "AppleBundleTypeDefault", "AppleBundleTypeExtensionKitExtension", "AppleBundleTypeWatchApp") +load(":apple_bundle_utility.bzl", "get_bundle_resource_processing_options", "get_default_binary_dep", "get_extension_attr", "get_flattened_binary_deps", "get_is_watch_bundle", "get_product_name") load(":apple_core_data.bzl", "compile_apple_core_data") load( ":apple_core_data_types.bzl", "AppleCoreDataSpec", # @unused Used as a type ) load(":apple_info_plist.bzl", "process_info_plist", "process_plist") +load(":apple_library.bzl", 
"AppleLibraryForDistributionInfo") +load(":apple_library_types.bzl", "AppleLibraryInfo") load( ":apple_resource_types.bzl", "AppleResourceDestination", @@ -84,7 +86,7 @@ def get_apple_bundle_resource_part_list(ctx: AnalysisContext) -> AppleBundleReso ), ) - cxx_sanitizer_runtime_info = ctx.attrs.binary.get(CxxSanitizerRuntimeInfo) if ctx.attrs.binary else None + cxx_sanitizer_runtime_info = get_default_binary_dep(ctx.attrs.binary).get(CxxSanitizerRuntimeInfo) if ctx.attrs.binary else None if cxx_sanitizer_runtime_info: runtime_resource_spec = AppleResourceSpec( files = cxx_sanitizer_runtime_info.runtime_files, @@ -129,6 +131,9 @@ def get_apple_bundle_resource_part_list(ctx: AnalysisContext) -> AppleBundleReso parts.extend(_copy_resources(ctx, resource_specs)) parts.extend(_copy_first_level_bundles(ctx)) + parts.extend(_copy_public_headers(ctx)) + parts.extend(_copy_module_map(ctx)) + parts.extend(_copy_swift_library_evolution_support(ctx)) return AppleBundleResourcePartListOutput( resource_parts = parts, @@ -154,12 +159,12 @@ def _copy_privacy_manifest_if_needed(ctx: AnalysisContext) -> list[AppleBundlePa else: output = ctx.actions.declare_output("PrivacyInfo.xcprivacy") artifact = ctx.actions.copy_file(output.as_output(), privacy_manifest) - return [AppleBundlePart(source = artifact, destination = AppleBundleDestination("metadata"))] + return [AppleBundlePart(source = artifact, destination = AppleBundleDestination("resources"))] def _select_resources(ctx: AnalysisContext) -> ((list[AppleResourceSpec], list[AppleAssetCatalogSpec], list[AppleCoreDataSpec], list[SceneKitAssetsSpec], list[CxxResourceSpec])): resource_group_info = get_resource_group_info(ctx) if resource_group_info: - resource_groups_deps = resource_group_info.implicit_deps + resource_groups_deps = resource_group_info.resource_group_to_implicit_deps_mapping.get(ctx.attrs.resource_group, []) if ctx.attrs.resource_group else [] resource_group_mappings = resource_group_info.mappings else: resource_groups_deps = [] @@ -168,13 +173,77 @@ def _select_resources(ctx: AnalysisContext) -> ((list[AppleResourceSpec], list[A resource_graph = create_resource_graph( ctx = ctx, labels = [], - bundle_binary = ctx.attrs.binary, + bundle_binary = get_default_binary_dep(ctx.attrs.binary), deps = ctx.attrs.deps + resource_groups_deps, exported_deps = [], ) resource_graph_node_map_func = get_resource_graph_node_map_func(resource_graph) return get_filtered_resources(ctx.label, resource_graph_node_map_func, ctx.attrs.resource_group, resource_group_mappings) +def _copy_swift_library_evolution_support(ctx: AnalysisContext) -> list[AppleBundlePart]: + extension = get_extension_attr(ctx) + if not extension == "framework": + return [] + + binary_deps = getattr(ctx.attrs, "binary") + if binary_deps == None: + return [] + + swiftmodule_files = {} + + module_name = None + for binary in get_flattened_binary_deps(binary_deps): + apple_library_for_distribution_info = binary.get(AppleLibraryForDistributionInfo) + if apple_library_for_distribution_info == None: + continue + module_name = apple_library_for_distribution_info.module_name + swiftmodule_files.update({ + apple_library_for_distribution_info.target_triple + ".swiftinterface": apple_library_for_distribution_info.swiftinterface, + apple_library_for_distribution_info.target_triple + ".private.swiftinterface": apple_library_for_distribution_info.private_swiftinterface, + apple_library_for_distribution_info.target_triple + ".swiftdoc": apple_library_for_distribution_info.swiftdoc, + }) + + if 
len(swiftmodule_files) == 0 or module_name == None: + return [] + + framework_module_dir = ctx.actions.declare_output(module_name + "framework.swiftmodule", dir = True) + ctx.actions.copied_dir(framework_module_dir.as_output(), swiftmodule_files) + return [AppleBundlePart(source = framework_module_dir, destination = AppleBundleDestination("modules"), new_name = module_name + ".swiftmodule")] + +def _copy_public_headers(ctx: AnalysisContext) -> list[AppleBundlePart]: + if not ctx.attrs.copy_public_framework_headers: + return [] + binary_deps = getattr(ctx.attrs, "binary") + if binary_deps == None: + return [] + + binary = get_default_binary_dep(binary_deps) + apple_library_info = binary.get(AppleLibraryInfo) + if apple_library_info == None: + return [] + tset = apple_library_info.public_framework_headers + + bundle_parts = [] + if tset._tset: + for public_framework_headers in tset._tset.traverse(): + for public_framework_header in public_framework_headers: + for artifact in public_framework_header.artifacts: + bundle_parts.append(AppleBundlePart(source = artifact, destination = AppleBundleDestination("headers"))) + + if apple_library_info.swift_header: + bundle_parts.append(AppleBundlePart(source = apple_library_info.swift_header, destination = AppleBundleDestination("headers"))) + + return bundle_parts + +def _copy_module_map(ctx: AnalysisContext) -> list[AppleBundlePart]: + extension = get_extension_attr(ctx) + if not extension == "framework": + return [] + module_map = ctx.attrs.module_map + if not module_map: + return [] + return [AppleBundlePart(source = module_map, destination = AppleBundleDestination("modules"))] + def _copy_resources(ctx: AnalysisContext, specs: list[AppleResourceSpec]) -> list[AppleBundlePart]: result = [] @@ -186,6 +255,8 @@ def _copy_resources(ctx: AnalysisContext, specs: list[AppleResourceSpec]) -> lis destination = bundle_destination, destination_relative_path = None, codesign_on_copy = spec.codesign_files_on_copy, + codesign_entitlements = spec.codesign_entitlements, + codesign_flags_override = spec.codesign_flags_override, ) for x in spec.files] result += _bundle_parts_for_dirs(spec.dirs, bundle_destination, False) result += _bundle_parts_for_dirs(spec.content_dirs, bundle_destination, True) @@ -214,7 +285,23 @@ def _copied_bundle_spec(bundle_info: AppleBundleInfo) -> [None, AppleBundlePart] destination = AppleBundleDestination(app_destination_type) codesign_on_copy = False elif bundle_extension == ".appex": - destination = AppleBundleDestination("plugins") + # We have two types of extensions: App Extensions and ExtensionKit Extensions + # + # +----------------------+-------------------------------+-------------------------------+ + # | | App Extension | ExtensionKit Extension | + # +----------------------+-------------------------------+-------------------------------+ + # | xcode project type | com.apple.product-type.app- | com.apple.product-type. 
| + # | | extension | extensionkit-extension | + # +----------------------+-------------------------------+-------------------------------+ + # | Info.plist | NSExtensions | EXAppExtensionAttributes | + # +----------------------+-------------------------------+-------------------------------+ + # | bundle folder | *.app/PlugIns | *.app/Extensions | + # +----------------------+-------------------------------+-------------------------------+ + # + if bundle_info.bundle_type == AppleBundleTypeExtensionKitExtension: + destination = AppleBundleDestination("extensionkit_extensions") + else: + destination = AppleBundleDestination("plugins") codesign_on_copy = False elif bundle_extension == ".qlgenerator": destination = AppleBundleDestination("quicklook") @@ -308,12 +395,29 @@ def _run_ibtool( ], allow_args = True, ) - command = cmd_args(["/bin/sh", wrapper_script]).hidden([ibtool_command, output]) + command = cmd_args(["/bin/sh", wrapper_script], hidden = [ibtool_command, output]) else: command = ibtool_command processing_options = get_bundle_resource_processing_options(ctx) - ctx.actions.run(command, prefer_local = processing_options.prefer_local, allow_cache_upload = processing_options.allow_cache_upload, category = "apple_ibtool", identifier = action_identifier) + ctx.actions.run( + command, + prefer_local = processing_options.prefer_local, + prefer_remote = processing_options.prefer_remote, + allow_cache_upload = processing_options.allow_cache_upload, + category = "apple_ibtool", + identifier = action_identifier, + ) + +def _ibtool_identifier(action: str, raw_file: Artifact) -> str: + "*.xib files can live in .lproj folders and have the same name, so we need to split the id" + identifier_parts = [] + variant_name = _get_variant_dirname(raw_file) + if variant_name: + # variant_name is like "zh_TW.lproj", and we only want "zh_TW" + identifier_parts.append(variant_name) + identifier_parts += [raw_file.basename] + return "ibtool_" + action + " " + "/".join(identifier_parts) def _compile_ui_resource( ctx: AnalysisContext, @@ -327,7 +431,7 @@ def _compile_ui_resource( output = output, action_flags = ["--compile"], target_device = target_device, - action_identifier = "compile_" + raw_file.basename, + action_identifier = _ibtool_identifier("compile", raw_file), output_is_dir = output_is_dir, ) @@ -343,7 +447,7 @@ def _link_ui_resource( output = output, action_flags = ["--link"], target_device = target_device, - action_identifier = "link_" + raw_file.basename, + action_identifier = _ibtool_identifier("link", raw_file), output_is_dir = output_is_dir, ) @@ -352,7 +456,9 @@ def _process_apple_resource_file_if_needed( file: Artifact, destination: AppleBundleDestination, destination_relative_path: [str, None], - codesign_on_copy: bool = False) -> AppleBundlePart: + codesign_on_copy: bool = False, + codesign_entitlements: Artifact | None = None, + codesign_flags_override: list[str] | None = None) -> AppleBundlePart: output_dir = "_ProcessedResources" basename = paths.basename(file.short_path) output_is_contents_dir = False @@ -365,6 +471,8 @@ def _process_apple_resource_file_if_needed( action_id = destination_relative_path, ) elif basename.endswith(".storyboard"): + if destination_relative_path: + destination_relative_path = paths.replace_extension(destination_relative_path, ".storyboardc") compiled = ctx.actions.declare_output(paths.join(output_dir, paths.replace_extension(file.short_path, ".storyboardc")), dir = True) if get_is_watch_bundle(ctx): output_is_contents_dir = True @@ -375,6 +483,8 @@ def 
_process_apple_resource_file_if_needed( processed = compiled _compile_ui_resource(ctx, file, processed.as_output()) elif basename.endswith(".xib"): + if destination_relative_path: + destination_relative_path = paths.replace_extension(destination_relative_path, ".nib") processed = ctx.actions.declare_output(paths.join(output_dir, paths.replace_extension(file.short_path, ".nib"))) _compile_ui_resource(ctx, file, processed.as_output()) else: @@ -383,17 +493,18 @@ def _process_apple_resource_file_if_needed( # When name is empty string only content of the directory will be copied, as opposed to the directory itself. # When name is `None`, directory or file will be copied as it is, without renaming. new_name = destination_relative_path if destination_relative_path else ("" if output_is_contents_dir else None) - return AppleBundlePart(source = processed, destination = destination, new_name = new_name, codesign_on_copy = codesign_on_copy) + return AppleBundlePart(source = processed, destination = destination, new_name = new_name, codesign_on_copy = codesign_on_copy, codesign_entitlements = codesign_entitlements, codesign_flags_override = codesign_flags_override) # Returns a path relative to the _parent_ of the lproj dir. # For example, given a variant file with a short path of`XX/YY.lproj/ZZ` # it would return `YY.lproj/ZZ`. def _get_dest_subpath_for_variant_file(variant_file: Artifact) -> str: - dir_name = paths.basename(paths.dirname(variant_file.short_path)) - if not dir_name.endswith("lproj"): + dir_name = _get_variant_dirname(variant_file) + if not dir_name: fail("Variant files have to be in a directory with name ending in '.lproj' but `{}` was not.".format(variant_file.short_path)) file_name = paths.basename(variant_file.short_path) return paths.join(dir_name, file_name) -def get_is_watch_bundle(ctx: AnalysisContext) -> bool: - return ctx.attrs._apple_toolchain[AppleToolchainInfo].watch_kit_stub_binary != None +def _get_variant_dirname(variant_file: Artifact) -> str | None: + dir_name = paths.basename(paths.dirname(variant_file.short_path)) + return dir_name if dir_name.endswith("lproj") else None diff --git a/prelude/apple/apple_bundle_types.bzl b/prelude/apple/apple_bundle_types.bzl index 527d13c733a..a073a109946 100644 --- a/prelude/apple/apple_bundle_types.bzl +++ b/prelude/apple/apple_bundle_types.bzl @@ -13,6 +13,31 @@ AppleBundleType = enum( "watchapp", # Bundle represents an App Clip to be embedded "appclip", + # Bundle represents an ExtensionKit extension to be embedded + "extensionkit_extension", +) + +ApplePackageExtension = enum( + "ipa", + "pkg", + "dmg", + "zip", +) + +AppleBundleManifestLogFiles = record( + command_file = field(Artifact), + spec_file = field(Artifact), + log_file = field([Artifact, None], None), +) + +AppleBundleManifest = record( + log_file_map = dict[Label, AppleBundleManifestLogFiles], +) + +AppleBundleManifestInfo = provider( + fields = { + "manifest": provider_field(AppleBundleManifest), + }, ) # Provider flagging that result of the rule contains Apple bundle. @@ -26,7 +51,7 @@ AppleBundleInfo = provider( "bundle_type": provider_field(AppleBundleType), # The name of the executable within the bundle. "binary_name": provider_field([str, None], default = None), - # If the bundle contains a Watch Extension executable, we have to update the packaging. + # If the bundle contains a Watch bundle, we have to update the packaging. # Similar to `is_watchos`, this might be omitted for certain types of bundles which don't depend on it. 
"contains_watchapp": provider_field([bool, None]), # By default, non-framework, non-appex binaries copy Swift libraries into the final @@ -63,15 +88,16 @@ AppleBundleExtraOutputsInfo = provider(fields = { AppleBundleBinaryOutput = record( binary = field(Artifact), debuggable_info = field([AppleDebuggableInfo, None], None), - # In the case of watchkit, the `ctx.attrs.binary`'s not set, and we need to create a stub binary. - is_watchkit_stub_binary = field(bool, False), ) AppleBundleTypeDefault = AppleBundleType("default") AppleBundleTypeWatchApp = AppleBundleType("watchapp") AppleBundleTypeAppClip = AppleBundleType("appclip") +AppleBundleTypeExtensionKitExtension = AppleBundleType("extensionkit_extension") # Represents the user-visible type which is distinct from the internal one (`AppleBundleType`) AppleBundleTypeAttributeType = enum( "appclip", + "extensionkit_extension", + "watchapp", ) diff --git a/prelude/apple/apple_bundle_utility.bzl b/prelude/apple/apple_bundle_utility.bzl index 72ef8d26593..abf26c7d9d1 100644 --- a/prelude/apple/apple_bundle_utility.bzl +++ b/prelude/apple/apple_bundle_utility.bzl @@ -14,6 +14,9 @@ load(":resource_groups.bzl", "ResourceGraphInfo") # `ctx` in all functions below is expected to be of `apple_bundle` or `apple_test` rule +def get_is_watch_bundle(ctx: AnalysisContext) -> bool: + return ctx.attrs._apple_toolchain[AppleToolchainInfo].sdk_name.startswith("watch") + def _get_bundle_target_name(ctx: AnalysisContext): if hasattr(ctx.attrs, "_bundle_target_name"): # `apple_resource_bundle` rules are proxies for the real rules, @@ -27,7 +30,10 @@ def get_product_name(ctx: AnalysisContext) -> str: def get_extension_attr(ctx: AnalysisContext) -> typing.Any: return ctx.attrs.extension -def get_default_binary_dep(binary_deps: dict[str, Dependency]) -> [Dependency, None]: +def get_default_binary_dep(binary_deps: [dict[str, Dependency], Dependency, None]) -> [Dependency, None]: + if not type(binary_deps) == "dict": + return binary_deps + if len(binary_deps.items()) == 1: return binary_deps.values()[0] @@ -39,20 +45,12 @@ def get_flattened_binary_deps(binary_deps: dict[str, Dependency]) -> list[Depend # Derives the effective deployment target for the bundle. It's # usually the deployment target of the binary if present, # otherwise it falls back to other values (see implementation). -def get_bundle_min_target_version(ctx: AnalysisContext, binary: [Dependency, None]) -> str: +def get_bundle_min_target_version(ctx: AnalysisContext, binary_or_binaries: [dict[str, Dependency], Dependency, None]) -> str: + binary = get_default_binary_dep(binary_or_binaries) + binary_min_version = None - # Could be not set for e.g. watchOS bundles which have a stub - # binary that comes from the apple_toolchain(), not from the - # apple_bundle() itself (i.e., binary field will be None). - # - # TODO(T114147746): The top-level stub bundle for a watchOS app - # does not have the ability to set its deployment target via - # a binary (as that field is empty). If it contains asset - # catalogs (can it?), we need to use correct target version. - # - # The solution might to be support SDK version from - # Info.plist (T110378109). 
+ # apple_xcuitest bundles do not have a binary if binary != None: min_version_info = binary[AppleMinDeploymentVersionInfo] if AppleMinDeploymentVersionInfo in binary else None if min_version_info != None: @@ -69,7 +67,13 @@ def get_bundle_min_target_version(ctx: AnalysisContext, binary: [Dependency, Non def get_bundle_resource_processing_options(ctx: AnalysisContext) -> AppleResourceProcessingOptions: compile_resources_locally = value_or(ctx.attrs._compile_resources_locally_override, ctx.attrs._apple_toolchain[AppleToolchainInfo].compile_resources_locally) - return AppleResourceProcessingOptions(prefer_local = compile_resources_locally, allow_cache_upload = compile_resources_locally) + is_watch_bundle = get_is_watch_bundle(ctx) + return AppleResourceProcessingOptions( + prefer_local = compile_resources_locally and (not is_watch_bundle), + # TODO: Remote execution preference should be part of `apple_toolchain()`, same as `compile_resources_locally` + prefer_remote = is_watch_bundle, + allow_cache_upload = compile_resources_locally, + ) def get_bundle_infos_from_graph(graph: ResourceGraphInfo) -> list[AppleBundleLinkerMapInfo]: bundle_infos = [] diff --git a/prelude/apple/apple_code_signing_types.bzl b/prelude/apple/apple_code_signing_types.bzl index 555a04f8aac..66ac6cad95c 100644 --- a/prelude/apple/apple_code_signing_types.bzl +++ b/prelude/apple/apple_code_signing_types.bzl @@ -7,7 +7,7 @@ # Provider which exposes a field from `apple_binary` to `apple_bundle` as it might be used during code signing. AppleEntitlementsInfo = provider(fields = { - "entitlements_file": provider_field([Artifact, None], default = None), + "entitlements_file": provider_field(Artifact | None, default = None), }) CodeSignType = enum( @@ -15,3 +15,9 @@ CodeSignType = enum( "adhoc", "distribution", ) + +CodeSignConfiguration = enum( + "dry-run", + "fast-adhoc", + "none", +) diff --git a/prelude/decls/apple_common.bzl b/prelude/apple/apple_common.bzl similarity index 64% rename from prelude/decls/apple_common.bzl rename to prelude/apple/apple_common.bzl index 92ff6803644..9e5b8dead81 100644 --- a/prelude/decls/apple_common.bzl +++ b/prelude/apple/apple_common.bzl @@ -10,6 +10,8 @@ # the generated docs, and so those should be verified to be accurate and # well-formatted (and then delete this TODO) +load("@prelude//:is_full_meta_repo.bzl", "is_full_meta_repo") + def _headers_arg(): return { "headers": attrs.named_set(attrs.source(), sorted = True, default = [], doc = """ @@ -133,6 +135,78 @@ def _privacy_manifest_arg(): """), } +def _debug_artifacts_validators_arg(): + return { + "debug_artifacts_validators": attrs.dict( + attrs.string(), + attrs.tuple( + # A target which will be passed two named arguments: + # --artifacts: A path to a file containing a list of artifact paths to inspect. + # --output: The path to write the analysis output to. + attrs.exec_dep(providers = [RunInfo]), + # A target which is passed the outputs of the previous script + # and emits a ValidationSpec validation_result JSON file. + # --analysis-json-path: A path to a JSON artifact. Keys are the configured target; + # the value is a list of artifact outputs from the previous script. + # --output: The path to write the ValidationSpec validation_result JSON file.
+ attrs.exec_dep(providers = [RunInfo]), + ), + default = {}, + ), + } + +def _serialize_debugging_options_arg(): + return { + # Need ability to distinguish between no value provided by users + # vs value explicitly set to `True` (in the latter case, we should + # show warning if value cannot be respected in mixed modules while + # in the former, we do not show a warning). + # + # Lack of value defaults to enabling serialized debugging options. + "serialize_debugging_options": attrs.option(attrs.bool(), default = None), + } + +def _uses_explicit_modules_arg(): + return { + "uses_explicit_modules": attrs.bool(default = False), + } + +def _meta_apple_library_validation_enabled_default_value(): + if not is_full_meta_repo(): + return False + + meta_apple_library_validation_enabled_default = (read_root_config("apple", "meta_apple_library_validation", "false").lower() == "true") + return select({ + "DEFAULT": select({ + "DEFAULT": meta_apple_library_validation_enabled_default, + "config//features/apple:fb_xplat_suffixing_check_disabled": False, + "config//features/apple:fb_xplat_suffixing_check_enabled": True, + }), + # arvr targets do not use suffixed targets, as any xplat target deps + # get rewritten without the Apple-specific suffixes. + "config//build_mode/constraints:arvr_mode_enabled": False, + }) + +def _meta_apple_library_validation_enabled_arg(): + return { + "_meta_apple_library_validation_enabled": attrs.bool(default = _meta_apple_library_validation_enabled_default_value()), + } + +def _skip_universal_resource_dedupe_default_value(): + if not is_full_meta_repo(): + return False + + return select({ + "DEFAULT": False, + "config//features/apple:skip_universal_resource_dedupe_disabled": False, + "config//features/apple:skip_universal_resource_dedupe_enabled": True, + }) + +def _skip_universal_resource_dedupe_arg(): + return { + "skip_universal_resource_dedupe": attrs.bool(default = _skip_universal_resource_dedupe_default_value()), + } + apple_common = struct( headers_arg = _headers_arg, exported_headers_arg = _exported_headers_arg, @@ -144,4 +218,9 @@ apple_common = struct( extra_xcode_sources = _extra_xcode_sources, extra_xcode_files = _extra_xcode_files, privacy_manifest_arg = _privacy_manifest_arg, + debug_artifacts_validators_arg = _debug_artifacts_validators_arg, + serialize_debugging_options_arg = _serialize_debugging_options_arg, + uses_explicit_modules_arg = _uses_explicit_modules_arg, + meta_apple_library_validation_enabled_arg = _meta_apple_library_validation_enabled_arg, + skip_universal_resource_dedupe_arg = _skip_universal_resource_dedupe_arg, ) diff --git a/prelude/apple/apple_core_data.bzl b/prelude/apple/apple_core_data.bzl index 82721a4af96..6c0becbc6ef 100644 --- a/prelude/apple/apple_core_data.bzl +++ b/prelude/apple/apple_core_data.bzl @@ -5,14 +5,17 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
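Note: a minimal sketch, not part of this diff, of how the `debug_artifacts_validators` attribute defined in apple_common.bzl above might be populated on a rule that wires it in; the `//tools:...` labels and the "dwarf_scan" key are hypothetical:

    apple_library(
        name = "MyLib",
        srcs = ["MyLib.swift"],
        debug_artifacts_validators = {
            "dwarf_scan": (
                # First tool: invoked with --artifacts / --output.
                "//tools:scan_debug_artifacts",
                # Second tool: invoked with --analysis-json-path / --output.
                "//tools:emit_validation_result",
            ),
        },
    )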
+load("@prelude//:paths.bzl", "paths") load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo") load(":apple_bundle_utility.bzl", "get_bundle_min_target_version", "get_bundle_resource_processing_options") load(":apple_core_data_types.bzl", "AppleCoreDataSpec") load(":apple_sdk.bzl", "get_apple_sdk_name") +load(":apple_target_sdk_version.bzl", "get_platform_name_for_sdk", "get_platform_version_for_sdk_version") load(":resource_groups.bzl", "create_resource_graph") def apple_core_data_impl(ctx: AnalysisContext) -> list[Provider]: spec = AppleCoreDataSpec( + module = ctx.attrs.module, path = ctx.attrs.path, ) graph = create_resource_graph( @@ -24,17 +27,18 @@ def apple_core_data_impl(ctx: AnalysisContext) -> list[Provider]: ) return [DefaultInfo(), graph] -def compile_apple_core_data(ctx: AnalysisContext, specs: list[AppleCoreDataSpec], product_name: str) -> [Artifact, None]: +def compile_apple_core_data(ctx: AnalysisContext, specs: list[AppleCoreDataSpec], product_name: str) -> Artifact | None: if len(specs) == 0: return None output = ctx.actions.declare_output("AppleCoreDataCompiled") - # Aggregate all the coredata momc commands together - momc_commands = [] + # Aggregate all the coredata momc and mapc commands together + tool_commands = [] for spec in specs: - momc_command = _get_momc_command(ctx, spec, product_name, cmd_args("$TMPDIR")) - momc_commands.append(momc_command) + tool, output_path = _get_model_args(ctx, spec) + tool_command = _get_tool_command(ctx, spec, product_name, tool, output_path) + tool_commands.append(tool_command) # Sandboxing and fs isolation on RE machines results in Xcode tools failing # when those are working in freshly created directories in buck-out. @@ -42,29 +46,50 @@ def compile_apple_core_data(ctx: AnalysisContext, specs: list[AppleCoreDataSpec] # As a workaround create a directory in tmp, use it for Xcode tools, then # copy the result to buck-out. 
wrapper_script, _ = ctx.actions.write( - "momc_wrapper.sh", + "tool_wrapper.sh", [ cmd_args("set -euo pipefail"), cmd_args('export TMPDIR="$(mktemp -d)"'), - cmd_args(momc_commands), + cmd_args(tool_commands), cmd_args(output, format = 'mkdir -p {} && cp -r "$TMPDIR"/ {}'), ], allow_args = True, ) - combined_command = cmd_args(["/bin/sh", wrapper_script]).hidden(momc_commands + [output.as_output()]) + combined_command = cmd_args(["/bin/sh", wrapper_script], hidden = tool_commands + [output.as_output()]) processing_options = get_bundle_resource_processing_options(ctx) - ctx.actions.run(combined_command, prefer_local = processing_options.prefer_local, allow_cache_upload = processing_options.allow_cache_upload, category = "apple_core_data") + ctx.actions.run( + combined_command, + prefer_local = processing_options.prefer_local, + prefer_remote = processing_options.prefer_remote, + allow_cache_upload = processing_options.allow_cache_upload, + category = "apple_core_data", + ) return output -def _get_momc_command(ctx: AnalysisContext, core_data_spec: AppleCoreDataSpec, product_name: str, output_directory: cmd_args) -> cmd_args: +def _get_model_args(ctx: AnalysisContext, core_data_spec: AppleCoreDataSpec): + toolchain = ctx.attrs._apple_toolchain[AppleToolchainInfo] + + if core_data_spec.path.extension == ".xcmappingmodel": + filename = paths.replace_extension(core_data_spec.path.basename, ".cdm") + return toolchain.mapc, cmd_args("$TMPDIR/" + filename) + else: + return toolchain.momc, cmd_args("$TMPDIR") + +def _get_tool_command(ctx: AnalysisContext, core_data_spec: AppleCoreDataSpec, product_name: str, tool: RunInfo, output: cmd_args) -> cmd_args: + sdk_name = get_apple_sdk_name(ctx) + deployment_target = get_platform_version_for_sdk_version( + sdk_name = sdk_name, + sdk_version = get_bundle_min_target_version(ctx, ctx.attrs.binary), + ) + return cmd_args([ - ctx.attrs._apple_toolchain[AppleToolchainInfo].momc, + tool, "--sdkroot", ctx.attrs._apple_toolchain[AppleToolchainInfo].sdk_path, - "--" + get_apple_sdk_name(ctx) + "-deployment-target", - get_bundle_min_target_version(ctx, ctx.attrs.binary), + "--" + get_platform_name_for_sdk(sdk_name) + "-deployment-target", + deployment_target, "--module", - product_name, - core_data_spec.path, - output_directory, - ], delimiter = " ") + core_data_spec.module if core_data_spec.module else product_name, + cmd_args(core_data_spec.path, format = "./{}"), + output, + ], delimiter = " ", hidden = core_data_spec.path) diff --git a/prelude/apple/apple_core_data_types.bzl b/prelude/apple/apple_core_data_types.bzl index 700a5d602bf..a3cc54c047a 100644 --- a/prelude/apple/apple_core_data_types.bzl +++ b/prelude/apple/apple_core_data_types.bzl @@ -6,5 +6,6 @@ # of this source tree. 
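Note: to illustrate `_get_tool_command` above, assuming an iphoneos SDK and a resolved minimum target version of 17.0 (both hypothetical), the line emitted into the wrapper script for a `Foo.xcdatamodeld` spec with no explicit `module` would look roughly like:

    momc --sdkroot <sdk_path> --iphoneos-deployment-target 17.0 --module <product_name> ./Foo.xcdatamodeld $TMPDIR

For a `.xcmappingmodel` input, `mapc` is used instead and the output path becomes `$TMPDIR/Foo.cdm`.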
AppleCoreDataSpec = record( + module = field(str | None), path = field(Artifact), ) diff --git a/prelude/apple/apple_dsym.bzl b/prelude/apple/apple_dsym.bzl index 49616c3cc14..f0726af1bea 100644 --- a/prelude/apple/apple_dsym.bzl +++ b/prelude/apple/apple_dsym.bzl @@ -18,36 +18,46 @@ def get_apple_dsym(ctx: AnalysisContext, executable: Artifact, debug_info: list[ # TODO(T110672942): Things which are still unsupported: # - oso_prefix -# - dsym_verification def get_apple_dsym_ext(ctx: AnalysisContext, executable: [ArgLike, Artifact], debug_info: list[ArgLike], action_identifier: str, output_path: str) -> Artifact: dsymutil = ctx.attrs._apple_toolchain[AppleToolchainInfo].dsymutil output = ctx.actions.declare_output(output_path, dir = True) - - cmd = cmd_args([dsymutil] + ctx.attrs._dsymutil_extra_flags + ["-o", output.as_output()]) - cmd.add(executable) - - # Mach-O executables don't contain DWARF data. - # Instead, they contain paths to the object files which themselves contain DWARF data. - # - # So, those object files are needed for dsymutil to be to create the dSYM bundle. - cmd.hidden(debug_info) + cmd = cmd_args( + [ + dsymutil, + "--verify-dwarf={}".format(ctx.attrs._dsymutil_verify_dwarf), + # Reproducers are not useful; we can reproduce from the action digest. + "--reproducer=Off", + ], + # Mach-O executables don't contain DWARF data. + # Instead, they contain paths to the object files which themselves contain DWARF data. + # So, those object files are needed for dsymutil to be able to create the dSYM bundle. + hidden = debug_info, + ) + if ctx.attrs.dsym_uses_parallel_linker: + cmd.add("--linker=parallel") + + cmd.add(ctx.attrs._dsymutil_extra_flags) + cmd.add( + [ + "-o", + output.as_output(), + executable, + ], + ) ctx.actions.run(cmd, category = "apple_dsym", identifier = action_identifier) - return output -def get_apple_dsym_info(ctx: AnalysisContext, binary_dsyms: list[Artifact], dep_dsyms: list[Artifact]) -> Artifact: +def get_apple_dsym_info_json(binary_dsyms: list[Artifact], dep_dsyms: list[Artifact]) -> dict[str, typing.Any]: dsym_info = {} - # WatchOS stub does not have a dSYM, so it's possible that we get zero `binary_dsyms` if len(binary_dsyms) == 1: dsym_info["binary"] = binary_dsyms[0] - elif len(binary_dsyms) > 1: - fail("There cannot be more than one binary dSYM") + else: + fail("There can only be one binary dSYM") if dep_dsyms: # `dedupe` needed as it's possible for the same dSYM to bubble up # through multiple paths in a graph (e.g., including both a binary # + bundle in the `deps` field of a parent bundle). dsym_info["deps"] = dedupe(dep_dsyms) - - return ctx.actions.write_json("dsym-info.json", dsym_info) + return dsym_info diff --git a/prelude/apple/apple_dsym_config.bzl b/prelude/apple/apple_dsym_config.bzl index 1e29bb0fdc6..6d5dd7404df 100644 --- a/prelude/apple/apple_dsym_config.bzl +++ b/prelude/apple/apple_dsym_config.bzl @@ -5,9 +5,12 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -load("@prelude//utils:buckconfig.bzl", "read_list") +load("@prelude//utils:buckconfig.bzl", "read_choice", "read_list") def apple_dsym_config() -> dict[str, typing.Any]: return { "_dsymutil_extra_flags": read_list("apple", "dsymutil_extra_flags", delimiter = " ", default = [], root_cell = True), + # The default value of `--verify-dwarf` depends on the toolchain build mode. Default to `none` to unify behavior.
+ # https://github.com/llvm/llvm-project/blob/e3eb12cce97fa75d1d2443bcc2c2b26aa660fe34/llvm/tools/dsymutil/dsymutil.cpp#L94-L98 + "_dsymutil_verify_dwarf": read_choice("apple", "dsymutil_verify_dwarf", choices = ["none", "input", "output", "all", "auto"], default = "none", root_cell = True), } diff --git a/prelude/apple/apple_entitlements.bzl b/prelude/apple/apple_entitlements.bzl index 6a44ffb45ae..6342f926eb8 100644 --- a/prelude/apple/apple_entitlements.bzl +++ b/prelude/apple/apple_entitlements.bzl @@ -12,12 +12,12 @@ load(":apple_sdk_metadata.bzl", "IPhoneSimulatorSdkMetadata", "MacOSXCatalystSdk load(":apple_toolchain_types.bzl", "AppleToolchainInfo") def get_entitlements_codesign_args(ctx: AnalysisContext, codesign_type: CodeSignType) -> list[ArgLike]: - include_entitlements = _should_include_entitlements(ctx, codesign_type) + include_entitlements = should_include_entitlements(ctx, codesign_type) maybe_entitlements = _entitlements_file(ctx) if include_entitlements else None entitlements_args = ["--entitlements", maybe_entitlements] if maybe_entitlements else [] return entitlements_args -def _should_include_entitlements(ctx: AnalysisContext, codesign_type: CodeSignType) -> bool: +def should_include_entitlements(ctx: AnalysisContext, codesign_type: CodeSignType) -> bool: if codesign_type.value == "distribution": return True @@ -29,7 +29,7 @@ def _should_include_entitlements(ctx: AnalysisContext, codesign_type: CodeSignTy return False -def _entitlements_file(ctx: AnalysisContext) -> [Artifact, None]: +def _entitlements_file(ctx: AnalysisContext) -> Artifact | None: if hasattr(ctx.attrs, "entitlements_file"): # Bundling `apple_test` which doesn't have a binary to provide the entitlements, so they are provided via `entitlements_file` attribute directly. return ctx.attrs.entitlements_file @@ -38,7 +38,7 @@ def _entitlements_file(ctx: AnalysisContext) -> [Artifact, None]: return None # The `binary` attribute can be either an apple_binary or a dynamic library from apple_library - binary_entitlement_info = get_default_binary_dep(ctx.attrs.binary)[AppleEntitlementsInfo] + binary_entitlement_info = get_default_binary_dep(ctx.attrs.binary).get(AppleEntitlementsInfo) if binary_entitlement_info and binary_entitlement_info.entitlements_file: return binary_entitlement_info.entitlements_file diff --git a/prelude/apple/apple_error_handler.bzl b/prelude/apple/apple_error_handler.bzl new file mode 100644 index 00000000000..674182f3998 --- /dev/null +++ b/prelude/apple/apple_error_handler.bzl @@ -0,0 +1,63 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
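Note: a hypothetical `.buckconfig` snippet consumed by `apple_dsym_config()` above; `read_choice` presumably rejects any `dsymutil_verify_dwarf` value outside the listed choices, and the extra flag shown is only illustrative:

    [apple]
        dsymutil_extra_flags = --no-odr
        dsymutil_verify_dwarf = input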
+ +load("@prelude//apple:apple_error_handler_types.bzl", "AppleErrorCategories") +# @oss-disable: load("@prelude//apple/meta_only:apple_extra_error_categories.bzl", "APPLE_META_STDERR_ERROR_CATEGORIES") + +_APPLE_STDERR_ERROR_CATEGORIES = [ + #codesigning issues + AppleErrorCategories(string_match = "codesignprovisioningerror", categories = ["apple_code_sign_error", "code_sign_provisioning_error"]), + AppleErrorCategories(string_match = "the timestamp service is not available", categories = ["apple_code_sign_error"]), + #compilation issues + AppleErrorCategories(string_match = "failed to emit precompiled module", categories = ["apple_compilation_failure", "apple_pcm_compilation_failure"]), + AppleErrorCategories(string_match = "please rebuild precompiled header", categories = ["apple_compilation_failure", "apple_pcm_compilation_failure"]), + AppleErrorCategories(string_match = "llvm-lipo", categories = ["apple_lipo_failure"]), + AppleErrorCategories(string_match = ".swift:", categories = ["apple_compilation_failure", "apple_swift_compilation_failure"]), + AppleErrorCategories(string_match = ".cpp:", categories = ["apple_compilation_failure", "apple_cpp_compilation_failure"]), + AppleErrorCategories(string_match = ".cxx:", categories = ["apple_compilation_failure", "apple_cpp_compilation_failure"]), + AppleErrorCategories(string_match = ".m:", categories = ["apple_compilation_failure", "apple_objc_compilation_failure"]), + AppleErrorCategories(string_match = ".mm:", categories = ["apple_compilation_failure", "apple_objc_compilation_failure", "apple_cpp_compilation_failure", "apple_objcpp_compilation_failure"]), + AppleErrorCategories(string_match = ".c:", categories = ["apple_compilation_failure", "apple_c_compilation_failure"]), + AppleErrorCategories(string_match = ".modulemap:", categories = ["apple_compilation_failure", "apple_modulemap_compilation_failure"]), + AppleErrorCategories(string_match = "missing required modules", categories = ["apple_compilation_failure", "apple_missing_required_modules_error"]), + AppleErrorCategories(string_match = "has a minimum deployment target", categories = ["apple_compilation_failure", "apple_deployment_target_error"]), + + #toolchain / genrule issues + AppleErrorCategories(string_match = "stack dump:", categories = ["apple_binary_execution_failure"]), + AppleErrorCategories(string_match = "thread 'main' panicked", categories = ["apple_binary_execution_failure"]), + AppleErrorCategories(string_match = "error while loading shared libraries", categories = ["apple_binary_execution_failure"]), + AppleErrorCategories(string_match = "traceback (most recent call last)", categories = ["apple_python_execution_failure"]), + AppleErrorCategories(string_match = "command not found", categories = ["apple_command_not_found_failure"]), + AppleErrorCategories(string_match = "command timed out", categories = ["apple_timeout_failure"]), + AppleErrorCategories(string_match = "no such file or directory", categories = ["apple_no_such_file_failure"]), + + #user errors + AppleErrorCategories(string_match = "unknown target", categories = ["apple_unknown_buck_target_failure"]), + + #linker issues + AppleErrorCategories(string_match = "linker command failed", categories = ["apple_linker_failure"]), + AppleErrorCategories(string_match = "duplicate symbol", categories = ["apple_duplicate_symbol_failure"]), + AppleErrorCategories(string_match = "undefined symbol", categories = ["apple_undefined_symbol_failure"]), + AppleErrorCategories(string_match = "framework not found", 
categories = ["apple_framework_not_found_failure"]), + + #buck configuration issues + AppleErrorCategories(string_match = "unknown cell alias", categories = ["apple_buck_configuration_failure", "apple_unknown_cell_alias_failure"]), +] + +def _add_category_strings(lowercase_stderr: str, category_string_target: set[str], source: list[AppleErrorCategories]): + for error_category in source: + if error_category.string_match in lowercase_stderr: + for category_string in error_category.categories: + category_string_target.add(category_string) + +def apple_build_error_handler(ctx: ActionErrorCtx) -> list[ActionSubError]: + lowercase_stderr = ctx.stderr.lower() + categories = set() + _add_category_strings(lowercase_stderr, categories, _APPLE_STDERR_ERROR_CATEGORIES) + # @oss-disable: _add_category_strings(lowercase_stderr, categories, APPLE_META_STDERR_ERROR_CATEGORIES) + + return [ctx.new_sub_error(category = category_string) for category_string in sorted(categories)] diff --git a/prelude/apple/apple_error_handler_types.bzl b/prelude/apple/apple_error_handler_types.bzl new file mode 100644 index 00000000000..02b71f2c063 --- /dev/null +++ b/prelude/apple/apple_error_handler_types.bzl @@ -0,0 +1,14 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +AppleErrorCategories = record( + # Lowercase string that should (preferably uniquely) match the lowercased + # stderr output caused by an error of interest. + string_match = str, + # List of category tags to be applied in the event of this error. + categories = list[str], +) diff --git a/prelude/apple/apple_finalize_codesign.bzl b/prelude/apple/apple_finalize_codesign.bzl new file mode 100644 index 00000000000..64ccb4f152a --- /dev/null +++ b/prelude/apple/apple_finalize_codesign.bzl @@ -0,0 +1,50 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
+ +load(":apple_bundle_types.bzl", "AppleBundleInfo") + +def _apple_finalize_bundle_impl(ctx): + bundle_artifact = ctx.attrs.bundle[DefaultInfo].default_outputs[0] + finalized_bundle = ctx.actions.declare_output(bundle_artifact.basename) + + cmd = cmd_args([ + ctx.attrs.finalizer[RunInfo], + "--input-bundle-path", + bundle_artifact, + "--output-bundle-path", + finalized_bundle.as_output(), + "--sign-key", + ctx.attrs.sign_key, + ]) + ctx.actions.run( + cmd, + category = "apple_finalize_bundle", + identifier = bundle_artifact.basename, + ) + + original_bundle_info = ctx.attrs.bundle[AppleBundleInfo] + finalized_bundle_info = AppleBundleInfo( + bundle = finalized_bundle, + bundle_type = original_bundle_info.bundle_type, + binary_name = original_bundle_info.binary_name, + contains_watchapp = original_bundle_info.contains_watchapp, + skip_copying_swift_stdlib = original_bundle_info.skip_copying_swift_stdlib, + ) + + return [ + DefaultInfo(default_output = finalized_bundle), + finalized_bundle_info, + ] + +apple_finalize_bundle = rule( + attrs = { + "bundle": attrs.dep(), + "finalizer": attrs.exec_dep(providers = [RunInfo]), + "sign_key": attrs.string(default = "fbios-debug"), + }, + impl = _apple_finalize_bundle_impl, +) diff --git a/prelude/apple/apple_framework_versions.bzl b/prelude/apple/apple_framework_versions.bzl index 9b86b910f41..f876d83cb9b 100644 --- a/prelude/apple/apple_framework_versions.bzl +++ b/prelude/apple/apple_framework_versions.bzl @@ -9,7 +9,7 @@ load(":apple_sdk.bzl", "get_apple_sdk_name") load(":apple_target_sdk_version.bzl", "get_min_deployment_version_for_node") load(":apple_utility.bzl", "has_apple_toolchain") -_FRAMEWORK_INTRODUCED_VERSIONS = { +FRAMEWORK_INTRODUCED_VERSIONS = { "AGL": {"macosx": (10, 0, 0)}, "ARKit": {"iphoneos": (11, 0, 0), "maccatalyst": (14, 0, 0)}, "AVFAudio": { @@ -52,6 +52,9 @@ _FRAMEWORK_INTRODUCED_VERSIONS = { "macosx": (11, 0, 0), "watchos": (7, 0, 0), }, + "AccessorySetupKit": { + "iphoneos": (18, 0, 0), + }, "Accounts": { "iphoneos": (5, 0, 0), "maccatalyst": (13, 0, 0), @@ -188,6 +191,9 @@ _FRAMEWORK_INTRODUCED_VERSIONS = { "macosx": (10, 15, 0), "watchos": (6, 0, 0), }, + "ContactProvider": { + "iphoneos": (18, 0, 0), + }, "Contacts": { "iphoneos": (9, 0, 0), "maccatalyst": (13, 0, 0), @@ -246,6 +252,9 @@ _FRAMEWORK_INTRODUCED_VERSIONS = { "macosx": (10, 8, 0), "watchos": (2, 0, 0), }, + "CoreHID": { + "macosx": (15, 0, 0), + }, "CoreHaptics": { "appletvos": (14, 0, 0), "iphoneos": (13, 0, 0), @@ -426,6 +435,9 @@ _FRAMEWORK_INTRODUCED_VERSIONS = { "maccatalyst": (13, 0, 0), "macosx": (10, 13, 0), }, + "FSKit": { + "macosx": (15, 0, 0), + }, "FamilyControls": {"iphoneos": (15, 0, 0), "maccatalyst": (15, 0, 0)}, "FileProvider": {"iphoneos": (11, 0, 0), "macosx": (10, 15, 0)}, "FileProviderUI": { @@ -582,6 +594,10 @@ _FRAMEWORK_INTRODUCED_VERSIONS = { "maccatalyst": (16, 0, 0), "macosx": (12, 0, 0), }, + "LockedCameraCapture": { + "iphoneos": (18, 0, 0), + "maccatalyst": (18, 0, 0), + }, "MLCompute": { "appletvos": (14, 0, 0), "iphoneos": (14, 0, 0), @@ -612,6 +628,10 @@ _FRAMEWORK_INTRODUCED_VERSIONS = { "maccatalyst": (13, 0, 0), "macosx": (10, 9, 0), }, + "MediaExtension": { + "maccatalyst": (18, 0, 0), + "macosx": (15, 0, 0), + }, "MediaLibrary": {"maccatalyst": (13, 0, 0), "macosx": (10, 9, 0)}, "MediaPlayer": { "appletvos": (9, 0, 0), @@ -621,6 +641,10 @@ _FRAMEWORK_INTRODUCED_VERSIONS = { "watchos": (5, 0, 0), }, "MediaSetup": {"iphoneos": (14, 0, 0), "maccatalyst": (15, 4, 0)}, + "MediaToolbox": { + "iphoneos": (6, 0, 0), + 
"macosx": (10, 9, 0), + }, "MessageUI": {"iphoneos": (3, 0, 0), "maccatalyst": (13, 0, 0)}, "Messages": {"iphoneos": (10, 0, 0), "maccatalyst": (14, 0, 0)}, "Metal": { @@ -719,6 +743,14 @@ _FRAMEWORK_INTRODUCED_VERSIONS = { "macosx": (10, 15, 0), "watchos": (8, 0, 0), }, + "Observation": { + "iphoneos": (17, 0, 0), + "maccatalyst": (17, 0, 0), + "macosx": (14, 0, 0), + "tvos": (17, 0, 0), + "visionos": (1, 0, 0), + "watchos": (10, 0, 0), + }, "OpenCL": {"macosx": (10, 6, 0)}, "OpenDirectory": {"maccatalyst": (13, 0, 0), "macosx": (10, 6, 0)}, "OpenGL": {"macosx": (10, 0, 0)}, @@ -761,6 +793,10 @@ _FRAMEWORK_INTRODUCED_VERSIONS = { "watchos": (9, 0, 0), }, "PreferencePanes": {"maccatalyst": (14, 0, 0), "macosx": (10, 1, 0)}, + "ProximitReaderStub": { + "maccatalyst": (18, 0, 0), + "macosx": (15, 0, 0), + }, "ProximityReader": {"iphoneos": (15, 4, 0), "maccatalyst": (15, 4, 0)}, "PushKit": { "iphoneos": (8, 0, 0), @@ -835,6 +871,11 @@ _FRAMEWORK_INTRODUCED_VERSIONS = { }, "SecurityFoundation": {"maccatalyst": (13, 0, 0), "macosx": (10, 3, 0)}, "SecurityInterface": {"macosx": (10, 3, 0)}, + "SensitiveContentAnalysis": { + "iphoneos": (17, 0, 0), + "maccatalyst": (17, 0, 0), + "macosx": (14, 0, 0), + }, "SensorKit": { "iphoneos": (14, 0, 0), "maccatalyst": (14, 0, 0), @@ -884,6 +925,10 @@ _FRAMEWORK_INTRODUCED_VERSIONS = { "macosx": (10, 9, 0), "watchos": (3, 0, 0), }, + "StickerKit": { + "iphoneos": (18, 0, 0), + "macosx": (15, 0, 0), + }, "StoreKit": { "appletvos": (9, 0, 0), "iphoneos": (3, 0, 0), @@ -899,6 +944,14 @@ _FRAMEWORK_INTRODUCED_VERSIONS = { "watchos": (6, 0, 0), }, "SyncServices": {"macosx": (10, 4, 0)}, + "Synchronization": { + "iphoneos": (18, 0, 0), + "maccatalyst": (18, 0, 0), + "macosx": (15, 0, 0), + "tvos": (18, 0, 0), + "visionos": (2, 0, 0), + "watchos": (11, 0, 0), + }, "System": { "appletvos": (14, 0, 0), "iphoneos": (14, 0, 0), @@ -917,6 +970,9 @@ _FRAMEWORK_INTRODUCED_VERSIONS = { "TVServices": {"appletvos": (9, 0, 0)}, "TVUIKit": {"appletvos": (12, 0, 0)}, "TWAIN": {"macosx": (10, 2, 0)}, + "TabletopKit": { + "visionos": (2, 0, 0), + }, "TabularData": { "appletvos": (15, 0, 0), "iphoneos": (15, 0, 0), @@ -1016,6 +1072,13 @@ def _parse_version(version: str) -> (int, int, int): result[i] = components[i] return (result[0], result[1], result[2]) +def validate_sdk_frameworks(frameworks: list[str]) -> None: + for framework in frameworks: + if framework.startswith("$SDKROOT/System/Library/Frameworks"): + framework_name = framework[len("$SDKROOT/System/Library/Frameworks/"):-len(".framework")] + if framework_name not in FRAMEWORK_INTRODUCED_VERSIONS: + fail("Framework {} is missing version information".format(framework_name)) + def get_framework_linker_args(ctx: AnalysisContext, framework_names: list[str]) -> list[str]: if not has_apple_toolchain(ctx): return _get_unchecked_framework_linker_args(framework_names) @@ -1034,7 +1097,7 @@ def get_framework_linker_args(ctx: AnalysisContext, framework_names: list[str]) args = [] for name in framework_names: - versions = _FRAMEWORK_INTRODUCED_VERSIONS.get(name, None) + versions = FRAMEWORK_INTRODUCED_VERSIONS.get(name, None) if versions: introduced = versions.get(sdk_name, None) if not introduced: diff --git a/prelude/apple/apple_frameworks.bzl b/prelude/apple/apple_frameworks.bzl index 970c5c650aa..2957cbc6603 100644 --- a/prelude/apple/apple_frameworks.bzl +++ b/prelude/apple/apple_frameworks.bzl @@ -23,7 +23,7 @@ load( "merge_swiftmodule_linkables", ) load("@prelude//utils:expect.bzl", "expect") 
-load(":apple_framework_versions.bzl", "get_framework_linker_args") +load(":apple_framework_versions.bzl", "get_framework_linker_args", "validate_sdk_frameworks") load(":apple_toolchain_types.bzl", "AppleToolchainInfo") _IMPLICIT_SDKROOT_FRAMEWORK_SEARCH_PATHS = [ @@ -55,6 +55,7 @@ def _get_apple_frameworks_linker_flags(ctx: AnalysisContext, linkable: [Framewor return flags def get_framework_search_path_flags(ctx: AnalysisContext) -> cmd_args: + validate_sdk_frameworks(ctx.attrs.frameworks) unresolved_framework_dirs = _get_non_sdk_unresolved_framework_directories(ctx.attrs.frameworks) expanded_framework_dirs = _expand_sdk_framework_paths(ctx, unresolved_framework_dirs) return _get_framework_search_path_flags(expanded_framework_dirs) diff --git a/prelude/apple/apple_info_plist.bzl b/prelude/apple/apple_info_plist.bzl index 3b461f0b610..6e9c4d28cb8 100644 --- a/prelude/apple/apple_info_plist.bzl +++ b/prelude/apple/apple_info_plist.bzl @@ -7,7 +7,7 @@ load(":apple_bundle_destination.bzl", "AppleBundleDestination") load(":apple_bundle_part.bzl", "AppleBundlePart") -load(":apple_bundle_utility.bzl", "get_bundle_min_target_version", "get_product_name") +load(":apple_bundle_utility.bzl", "get_bundle_min_target_version", "get_default_binary_dep", "get_product_name") load(":apple_sdk.bzl", "get_apple_sdk_name") load( ":apple_sdk_metadata.bzl", @@ -18,9 +18,10 @@ load( "WatchSimulatorSdkMetadata", "get_apple_sdk_metadata_for_sdk_name", ) +load(":apple_target_sdk_version.bzl", "get_platform_name_for_sdk", "get_platform_version_for_sdk_version") load(":apple_toolchain_types.bzl", "AppleToolchainInfo", "AppleToolsInfo") -def process_info_plist(ctx: AnalysisContext, override_input: [Artifact, None]) -> AppleBundlePart: +def process_info_plist(ctx: AnalysisContext, override_input: Artifact | None) -> AppleBundlePart: input = _preprocess_info_plist(ctx) output = ctx.actions.declare_output("Info.plist") additional_keys = _additional_keys_as_json_file(ctx) @@ -64,7 +65,7 @@ def _preprocess_info_plist(ctx: AnalysisContext) -> Artifact: ctx.actions.run(command, category = "apple_preprocess_info_plist", **_get_plist_run_options()) return output -def _plist_substitutions_as_json_file(ctx: AnalysisContext) -> [Artifact, None]: +def _plist_substitutions_as_json_file(ctx: AnalysisContext) -> Artifact | None: info_plist_substitutions = ctx.attrs.info_plist_substitutions if not info_plist_substitutions: return None @@ -72,7 +73,7 @@ def _plist_substitutions_as_json_file(ctx: AnalysisContext) -> [Artifact, None]: substitutions_json = ctx.actions.write_json("plist_substitutions.json", info_plist_substitutions) return substitutions_json -def process_plist(ctx: AnalysisContext, input: Artifact, output: OutputArtifact, override_input: [Artifact, None] = None, additional_keys: [Artifact, None] = None, override_keys: [Artifact, None] = None, action_id: [str, None] = None): +def process_plist(ctx: AnalysisContext, input: Artifact, output: OutputArtifact, override_input: Artifact | None = None, additional_keys: Artifact | None = None, override_keys: Artifact | None = None, action_id: [str, None] = None): apple_tools = ctx.attrs._apple_tools[AppleToolsInfo] processor = apple_tools.info_plist_processor override_input_arguments = ["--override-input", override_input] if override_input != None else [] @@ -94,14 +95,19 @@ def _additional_keys_as_json_file(ctx: AnalysisContext) -> Artifact: def _info_plist_additional_keys(ctx: AnalysisContext) -> dict[str, typing.Any]: sdk_name = get_apple_sdk_name(ctx) + platform_name = 
get_platform_name_for_sdk(sdk_name) sdk_metadata = get_apple_sdk_metadata_for_sdk_name(sdk_name) result = _extra_mac_info_plist_keys(sdk_metadata, ctx.attrs.extension) result["CFBundleSupportedPlatforms"] = sdk_metadata.info_plist_supported_platforms_values - result["DTPlatformName"] = sdk_name + result["DTPlatformName"] = platform_name sdk_version = ctx.attrs._apple_toolchain[AppleToolchainInfo].sdk_version if sdk_version: - result["DTPlatformVersion"] = sdk_version - result["DTSDKName"] = sdk_name + sdk_version + platform_version = get_platform_version_for_sdk_version( + sdk_name = sdk_name, + sdk_version = sdk_version, + ) + result["DTPlatformVersion"] = platform_version + result["DTSDKName"] = platform_name + platform_version sdk_build_version = ctx.attrs._apple_toolchain[AppleToolchainInfo].sdk_build_version if sdk_build_version: result["DTPlatformBuild"] = sdk_build_version @@ -112,7 +118,10 @@ def _info_plist_additional_keys(ctx: AnalysisContext) -> dict[str, typing.Any]: xcode_version = ctx.attrs._apple_toolchain[AppleToolchainInfo].xcode_version if xcode_version: result["DTXcode"] = xcode_version - result[sdk_metadata.min_version_plist_info_key] = get_bundle_min_target_version(ctx, ctx.attrs.binary) + result[sdk_metadata.min_version_plist_info_key] = get_platform_version_for_sdk_version( + sdk_name = sdk_name, + sdk_version = get_bundle_min_target_version(ctx, get_default_binary_dep(ctx.attrs.binary)), + ) identify_build_system = ctx.attrs._info_plist_identify_build_system_default if ctx.attrs.info_plist_identify_build_system != None: @@ -142,6 +151,9 @@ def _info_plist_override_keys(ctx: AnalysisContext) -> dict[str, typing.Any]: if sdk_name == MacOSXSdkMetadata.name: if ctx.attrs.extension != "xpc": result["LSRequiresIPhoneOS"] = False - elif sdk_name not in [WatchOSSdkMetadata.name, WatchSimulatorSdkMetadata.name, MacOSXCatalystSdkMetadata.name]: + elif sdk_name in [WatchOSSdkMetadata.name, WatchSimulatorSdkMetadata.name]: + result["UIDeviceFamily"] = [4] + result["WKApplication"] = True + elif sdk_name not in [MacOSXCatalystSdkMetadata.name]: result["LSRequiresIPhoneOS"] = True return result diff --git a/prelude/apple/apple_info_plist_substitutions_parsing.bzl b/prelude/apple/apple_info_plist_substitutions_parsing.bzl index 38fec6f4f33..c5e726fdb36 100644 --- a/prelude/apple/apple_info_plist_substitutions_parsing.bzl +++ b/prelude/apple/apple_info_plist_substitutions_parsing.bzl @@ -51,7 +51,7 @@ def _expand_codesign_entitlements_path(info_plist_substitutions: dict[str, str], path = prefix + maybe_value + suffix fail("Too many iteration (loop might be present) to expand `{}` with substitutions `{}`".format(path, info_plist_substitutions)) -def parse_codesign_entitlements(info_plist_substitutions: [dict[str, str], None]) -> [str, None]: +def parse_codesign_entitlements(info_plist_substitutions: [dict[str, str | Select], None]) -> [str, None]: if not info_plist_substitutions: return None maybe_path = info_plist_substitutions.get(_CODE_SIGN_ENTITLEMENTS_KEY) diff --git a/prelude/apple/apple_library.bzl b/prelude/apple/apple_library.bzl index 2a3232e894d..70e1cbcf7ef 100644 --- a/prelude/apple/apple_library.bzl +++ b/prelude/apple/apple_library.bzl @@ -7,15 +7,21 @@ load( "@prelude//:artifact_tset.bzl", + "make_artifact_tset", "project_artifacts", ) +load("@prelude//:attrs_validators.bzl", "get_attrs_validators_outputs") +load("@prelude//:paths.bzl", "paths") load("@prelude//:validation_deps.bzl", "get_validation_deps_outputs") load("@prelude//apple:apple_dsym.bzl", 
"DSYM_SUBTARGET", "get_apple_dsym") +load("@prelude//apple:apple_error_handler.bzl", "apple_build_error_handler") load("@prelude//apple:apple_stripping.bzl", "apple_strip_args") load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo") +# @oss-disable: load("@prelude//apple/meta_only:apple_library_meta_validation.bzl", "apple_library_validate_for_meta_restrictions") # @oss-disable: load("@prelude//apple/meta_only:linker_outputs.bzl", "add_extra_linker_outputs") load( "@prelude//apple/swift:swift_compilation.bzl", + "SwiftLibraryForDistributionOutput", # @unused Used as a type "compile_swift", "get_swift_anonymous_targets", "get_swift_debug_infos", @@ -32,7 +38,8 @@ load( ) load( "@prelude//cxx:compile.bzl", - "CxxSrcWithFlags", # @unused Used as a type + "AsmExtensions", + "CxxSrcCompileCommand", # @unused Used as a type ) load( "@prelude//cxx:cxx_library.bzl", @@ -44,7 +51,15 @@ load( "cxx_attr_deps", "cxx_attr_exported_deps", ) -load("@prelude//cxx:cxx_sources.bzl", "get_srcs_with_flags") +load( + "@prelude//cxx:cxx_sources.bzl", + "CxxSrcWithFlags", # @unused Used as a type + "get_srcs_with_flags", +) +load( + "@prelude//cxx:cxx_toolchain_types.bzl", + "CxxToolchainInfo", # @unused Used as type +) load( "@prelude//cxx:cxx_types.bzl", "CxxRuleAdditionalParams", @@ -52,7 +67,7 @@ load( "CxxRuleProviderParams", "CxxRuleSubTargetParams", ) -load("@prelude//cxx:headers.bzl", "cxx_attr_exported_headers") +load("@prelude//cxx:headers.bzl", "cxx_attr_exported_headers", "cxx_attr_headers_list") load( "@prelude//cxx:linker.bzl", "SharedLibraryFlagOverrides", @@ -63,16 +78,19 @@ load( "CPreprocessorArgs", "CPreprocessorInfo", # @unused Used as a type ) +load("@prelude//cxx:target_sdk_version.bzl", "get_unversioned_target_triple") load( "@prelude//linking:link_info.bzl", "LibOutputStyle", ) load("@prelude//utils:arglike.bzl", "ArgLike") load("@prelude//utils:expect.bzl", "expect") +load("@prelude//apple/mockingbird/mockingbird_types.bzl", "MockingbirdLibraryInfo", "MockingbirdLibraryInfoTSet", "MockingbirdLibraryRecord", "MockingbirdSourcesInfo", "MockingbirdTargetType") load(":apple_bundle_types.bzl", "AppleBundleLinkerMapInfo", "AppleMinDeploymentVersionInfo") load(":apple_frameworks.bzl", "get_framework_search_path_flags") +load(":apple_library_types.bzl", "AppleLibraryInfo") load(":apple_modular_utility.bzl", "MODULE_CACHE_PATH") -load(":apple_target_sdk_version.bzl", "get_min_deployment_version_for_node", "get_min_deployment_version_target_linker_flags", "get_min_deployment_version_target_preprocessor_flags") +load(":apple_target_sdk_version.bzl", "get_min_deployment_version_for_node") load(":apple_utility.bzl", "get_apple_cxx_headers_layout", "get_apple_stripped_attr_value_with_default_fallback", "get_module_name") load( ":debug.bzl", @@ -85,9 +103,9 @@ load(":xcode.bzl", "apple_populate_xcode_attributes") load(":xctest_swift_support.bzl", "xctest_swift_support_info") AppleSharedLibraryMachOFileType = enum( - # dynamicly bound shared library file + # dynamically bound shared library file "dylib", - # dynamicly bound bundle file aka Mach-O bundle + # dynamically bound bundle file aka Mach-O bundle "bundle", ) @@ -113,7 +131,19 @@ AppleLibraryAdditionalParams = record( force_link_group_linking = field(bool, False), ) +AppleLibraryForDistributionInfo = provider( + fields = { + "module_name": str, + "private_swiftinterface": Artifact, + "swiftdoc": Artifact, + "swiftinterface": Artifact, + "target_triple": str, + }, +) + def apple_library_impl(ctx: AnalysisContext) -> [Promise, 
list[Provider]]: + # @oss-disable: apple_library_validate_for_meta_restrictions(ctx) + def get_apple_library_providers(deps_providers) -> list[Provider]: shared_type = AppleSharedLibraryMachOFileType(ctx.attrs.shared_library_macho_file_type) if shared_type == AppleSharedLibraryMachOFileType("bundle"): @@ -132,6 +162,7 @@ def apple_library_impl(ctx: AnalysisContext) -> [Promise, list[Provider]]: rule_type = "apple_library", generate_providers = CxxRuleProviderParams( java_packaging_info = False, + java_global_code_info = False, android_packageable_info = False, omnibus_root = False, # We generate a provider on our own, disable to avoid several providers of same type. @@ -142,15 +173,143 @@ def apple_library_impl(ctx: AnalysisContext) -> [Promise, list[Provider]]: deps_providers, ) output = cxx_library_parameterized(ctx, constructor_params) - return output.providers + + return output.providers + _make_mockingbird_library_info_provider(ctx) if uses_explicit_modules(ctx): return get_swift_anonymous_targets(ctx, get_apple_library_providers) else: return get_apple_library_providers([]) +def _compile_index_store(ctx: AnalysisContext, src_compile_cmd: CxxSrcCompileCommand, toolchain: CxxToolchainInfo, compile_cmd: cmd_args, pic: bool) -> Artifact | None: + identifier = src_compile_cmd.src.short_path + if src_compile_cmd.index != None: + # Add a unique suffix if we have duplicate source files with different flags + identifier = identifier + "_" + str(src_compile_cmd.index) + filename_base = identifier + identifier += " (index_store)" + + # We generate the index only for pic compilations + if not pic: + return None + + if src_compile_cmd.src.extension in AsmExtensions.values(): + return None + + cmd = compile_cmd.copy() + + # We use the `-fsyntax-only` flag, so no output will be generated. + # The output here is used for the identifier of the index unit file + output_name = paths.join( + ctx.label.cell, + ctx.label.package, + ctx.label.name, + "{}.{}".format(filename_base, toolchain.linker_info.object_file_extension), + ) + cmd.add(["-o", output_name]) + + index_store = ctx.actions.declare_output(paths.join("__indexstore__", filename_base, "index_store"), dir = True) + + # We don't use `-fdebug-prefix-map` for now; index-import will remap the paths instead, though that's not ideal.
+ cmd.add([ + "-fsyntax-only", + "-index-ignore-system-symbols", + "-index-store-path", + index_store.as_output(), + ]) + + category = "apple_cxx_index_store" + ctx.actions.run( + cmd, + category = category, + identifier = identifier, + allow_cache_upload = True, + ) + + return index_store + +def _make_apple_library_for_distribution_info_provider(ctx: AnalysisContext, swift_library_for_distribution: [None, SwiftLibraryForDistributionOutput]) -> list[AppleLibraryForDistributionInfo]: + if not swift_library_for_distribution: + return [] + return [AppleLibraryForDistributionInfo( + target_triple = get_unversioned_target_triple(ctx).replace("macosx", "macos"), + swiftinterface = swift_library_for_distribution.swiftinterface, + private_swiftinterface = swift_library_for_distribution.private_swiftinterface, + swiftdoc = swift_library_for_distribution.swiftdoc, + module_name = get_module_name(ctx), + )] + +def _make_apple_library_info_provider(ctx: AnalysisContext, swift_header: [None, Artifact]) -> list[AppleLibraryInfo]: + public_framework_headers = cxx_attr_headers_list(ctx, ctx.attrs.public_framework_headers, [], get_apple_cxx_headers_layout(ctx)) + all_deps = cxx_attr_deps(ctx) + cxx_attr_exported_deps(ctx) + apple_library_infos = filter(None, [dep.get(AppleLibraryInfo) for dep in all_deps]) + + public_framework_header_tset = make_artifact_tset( + actions = ctx.actions, + label = ctx.label, + artifacts = [header.artifact for header in public_framework_headers], + children = [apple_library.public_framework_headers for apple_library in apple_library_infos], + ) + + return [AppleLibraryInfo( + public_framework_headers = public_framework_header_tset, + swift_header = swift_header, + target = ctx.label, + labels = ctx.attrs.labels, + )] + +def _make_mockingbird_library_info_provider(ctx: AnalysisContext) -> list[MockingbirdLibraryInfo]: + _, swift_sources = _filter_swift_srcs(ctx) + + if len(swift_sources) == 0: + return [] + + deps_mockingbird_infos = filter(None, [dep.get(MockingbirdLibraryInfo) for dep in cxx_attr_deps(ctx)]) + exported_deps_mockingbird_infos = filter(None, [dep.get(MockingbirdLibraryInfo) for dep in cxx_attr_exported_deps(ctx)]) + + children = [] + dep_names = [] + exported_dep_names = [] + for info in deps_mockingbird_infos: + dep_names.append(info.name) + children.append(info.tset) + + for info in exported_deps_mockingbird_infos: + exported_dep_names.append(info.name) + children.append(info.tset) + + mockingbird_srcs_folder = ctx.actions.declare_output("mockingbird_srcs_" + ctx.attrs.name, dir = True) + + ctx.actions.symlinked_dir( + mockingbird_srcs_folder, + {source.file.basename: source.file for source in swift_sources}, + ) + + mockingbird_record = MockingbirdLibraryRecord( + name = ctx.attrs.name, + srcs = [src.file for src in swift_sources], + dep_names = dep_names, + exported_dep_names = exported_dep_names, + type = MockingbirdTargetType("library"), + src_dir = mockingbird_srcs_folder, + ) + + mockingbird_tset = ctx.actions.tset(MockingbirdLibraryInfoTSet, value = mockingbird_record, children = children) + + return [MockingbirdLibraryInfo( + name = ctx.attrs.name, + tset = mockingbird_tset, + )] + def apple_library_rule_constructor_params_and_swift_providers(ctx: AnalysisContext, params: AppleLibraryAdditionalParams, deps_providers: list = [], is_test_target: bool = False) -> CxxRuleConstructorParams: - cxx_srcs, swift_srcs = _filter_swift_srcs(ctx) + mockingbird_gen_sources = [] + if not "dummy_library" in ctx.attrs.labels: + for dep in cxx_attr_deps(ctx) + 
cxx_attr_exported_deps(ctx): + if MockingbirdSourcesInfo in dep: + for src in dep[MockingbirdSourcesInfo].srcs: + mockingbird_gen_sources.append(src) + + cxx_srcs, swift_srcs = _filter_swift_srcs(ctx, mockingbird_gen_sources) # First create a modulemap if necessary. This is required for importing # ObjC code in Swift so must be done before Swift compilation. @@ -189,7 +348,7 @@ def apple_library_rule_constructor_params_and_swift_providers(ctx: AnalysisConte else: exported_pre = None - swift_dependency_info = swift_compile.dependency_info if swift_compile else get_swift_dependency_info(ctx, None, None, deps_providers) + swift_dependency_info = swift_compile.dependency_info if swift_compile else get_swift_dependency_info(ctx, None, deps_providers) swift_debug_info = get_swift_debug_infos( ctx, swift_dependency_info, @@ -219,6 +378,8 @@ def apple_library_rule_constructor_params_and_swift_providers(ctx: AnalysisConte contains_swift_sources = bool(swift_srcs) xctest_swift_support_provider = xctest_swift_support_info(ctx, contains_swift_sources, is_test_target) + attrs_validators_providers, attrs_validators_subtargets = get_attrs_validators_outputs(ctx) + def additional_providers_factory(propagated_exported_preprocessor_info: [CPreprocessorInfo, None]) -> list[Provider]: # Expose `SwiftPCMUncompiledInfo` which represents the ObjC part of a target, # if a target also has a Swift part, the provider will expose the generated `-Swift.h` header. @@ -231,23 +392,70 @@ def apple_library_rule_constructor_params_and_swift_providers(ctx: AnalysisConte providers = [swift_pcm_uncompile_info] if swift_pcm_uncompile_info else [] providers.append(swift_dependency_info) providers.append(xctest_swift_support_provider) + providers.extend(attrs_validators_providers) + return providers framework_search_path_pre = CPreprocessor( - relative_args = CPreprocessorArgs(args = [framework_search_paths_flags]), + args = CPreprocessorArgs(args = [framework_search_paths_flags]), ) validation_deps_outputs = get_validation_deps_outputs(ctx) + if swift_compile: + swift_objc_header = swift_compile.exported_swift_header + swift_library_for_distribution_output = swift_compile.swift_library_for_distribution_output + else: + swift_objc_header = None + swift_library_for_distribution_output = None + + extra_apple_providers = [] + if not is_test_target: + extra_apple_providers = _make_apple_library_info_provider(ctx, swift_objc_header) + _make_apple_library_for_distribution_info_provider(ctx, swift_library_for_distribution_output) + + # Always provide a valid JSON object, so that tooling can depend on its existence + modulemap_info_json = {"modulemap": exported_pre.modulemap_path} if (exported_pre and exported_pre.modulemap_path) else {} + modulemap_info_json_file = ctx.actions.declare_output("modulemap-info.json") + modulemap_info_json_cmd_args = ctx.actions.write_json(modulemap_info_json_file, modulemap_info_json, with_inputs = True, pretty = True) + modulemap_info_providers = [DefaultInfo(default_output = modulemap_info_json_file, other_outputs = [modulemap_info_json_cmd_args])] + + subtargets = { + "modulemap-info": modulemap_info_providers, + "swift-compilation-database": [DefaultInfo(default_output = None)], + "swift-compile": [DefaultInfo(default_output = None)], + "swift-interface": [swift_interface], + "swiftmodule": [DefaultInfo(default_output = None)], + } + if swift_compile: + subtargets["swift-compilation-database"] = [ + DefaultInfo( + default_output = swift_compile.compilation_database.db, + other_outputs =
[swift_compile.compilation_database.other_outputs], + ), + ] + subtargets["swift-compile"] = [DefaultInfo(default_outputs = swift_compile.object_files)] + + if swift_compile.output_map_artifact: + subtargets["swift-output-file-map"] = [DefaultInfo(default_output = swift_compile.output_map_artifact)] + + if swift_compile.swiftdeps: + subtargets["swiftdeps"] = [ + DefaultInfo( + default_output = swift_compile.swiftdeps[0], + other_outputs = swift_compile.swiftdeps[1:], + ), + ] + + subtargets["swiftmodule"] = [DefaultInfo(default_output = swift_compile.swiftmodule)] + return CxxRuleConstructorParams( rule_type = params.rule_type, is_test = (params.rule_type == "apple_test"), headers_layout = get_apple_cxx_headers_layout(ctx), extra_exported_link_flags = params.extra_exported_link_flags, extra_hidden = validation_deps_outputs, - extra_link_flags = [_get_linker_flags(ctx)], extra_link_input = swift_object_files, extra_link_input_has_external_debug_info = True, - extra_preprocessors = get_min_deployment_version_target_preprocessor_flags(ctx) + [swift_pre, modular_pre], + extra_preprocessors = [swift_pre, modular_pre], extra_exported_preprocessors = filter(None, [framework_search_path_pre, exported_pre]), srcs = cxx_srcs, additional = CxxRuleAdditionalParams( @@ -258,46 +466,37 @@ def apple_library_rule_constructor_params_and_swift_providers(ctx: AnalysisConte # follow. static_external_debug_info = swift_debug_info.static, shared_external_debug_info = swift_debug_info.shared, - subtargets = { - "swift-compilation-database": [ - DefaultInfo( - default_output = swift_compile.compilation_database.db if swift_compile else None, - other_outputs = [swift_compile.compilation_database.other_outputs] if swift_compile else [], - ), - ], - "swift-compile": [ - DefaultInfo( - default_outputs = swift_compile.object_files if swift_compile else None, - ), - ], - "swift-interface": [swift_interface], - "swift-output-file-map": [ - DefaultInfo( - default_output = swift_compile.output_map_artifact if swift_compile else None, - ), - ], - "swiftmodule": [ - DefaultInfo( - default_output = swift_compile.swiftmodule if swift_compile else None, - ), - ], - }, + subtargets = subtargets | attrs_validators_subtargets, additional_providers_factory = additional_providers_factory, + external_debug_info_tags = [], # This might be used to materialise all transitive Swift related object files with ArtifactInfoTag("swiftmodule") ), - output_style_sub_targets_and_providers_factory = _get_link_style_sub_targets_and_providers, + output_style_sub_targets_and_providers_factory = _get_link_style_sub_targets_and_providers(extra_apple_providers), shared_library_flags = params.shared_library_flags, # apple_library's 'stripped' arg only applies to shared subtargets, or, # targets with 'preferred_linkage = "shared"' strip_executable = get_apple_stripped_attr_value_with_default_fallback(ctx), strip_args_factory = apple_strip_args, force_link_group_linking = params.force_link_group_linking, - cxx_populate_xcode_attributes_func = lambda local_ctx, **kwargs: _xcode_populate_attributes(ctx = local_ctx, populate_xcode_attributes_func = params.populate_xcode_attributes_func, **kwargs), + cxx_populate_xcode_attributes_func = lambda local_ctx, **kwargs: _xcode_populate_attributes(ctx = local_ctx, populate_xcode_attributes_func = params.populate_xcode_attributes_func, contains_swift_sources = contains_swift_sources, **kwargs), generate_sub_targets = params.generate_sub_targets, generate_providers = params.generate_providers, # Some apple rules rely 
on `static` libs *not* following dependents. link_groups_force_static_follows_dependents = False, extra_linker_outputs_factory = _get_extra_linker_flags_and_outputs, swiftmodule_linkable = get_swiftmodule_linkable(swift_compile), + extra_shared_library_interfaces = [swift_compile.exported_symbols] if (swift_compile and swift_compile.exported_symbols) else None, + compiler_flags = ctx.attrs.compiler_flags, + lang_compiler_flags = ctx.attrs.lang_compiler_flags, + platform_compiler_flags = ctx.attrs.platform_compiler_flags, + lang_platform_compiler_flags = ctx.attrs.lang_platform_compiler_flags, + preprocessor_flags = ctx.attrs.preprocessor_flags, + lang_preprocessor_flags = ctx.attrs.lang_preprocessor_flags, + platform_preprocessor_flags = ctx.attrs.platform_preprocessor_flags, + lang_platform_preprocessor_flags = ctx.attrs.lang_platform_preprocessor_flags, + swift_objc_header = swift_objc_header, + error_handler = apple_build_error_handler, + index_store_factory = _compile_index_store, + index_stores = swift_compile.index_stores if swift_compile else None, ) def _get_extra_linker_flags_and_outputs( @@ -306,10 +505,10 @@ def _get_extra_linker_flags_and_outputs( # @oss-disable: return add_extra_linker_outputs(ctx) return [], {} # @oss-enable -def _filter_swift_srcs(ctx: AnalysisContext) -> (list[CxxSrcWithFlags], list[CxxSrcWithFlags]): +def _filter_swift_srcs(ctx: AnalysisContext, additional_srcs: list = []) -> (list[CxxSrcWithFlags], list[CxxSrcWithFlags]): cxx_srcs = [] swift_srcs = [] - for s in get_srcs_with_flags(ctx): + for s in get_srcs_with_flags(ctx, additional_srcs): if s.file.extension == SWIFT_EXTENSION: swift_srcs.append(s) else: @@ -318,76 +517,79 @@ def _filter_swift_srcs(ctx: AnalysisContext) -> (list[CxxSrcWithFlags], list[Cxx return cxx_srcs, swift_srcs def _get_link_style_sub_targets_and_providers( - output_style: LibOutputStyle, - ctx: AnalysisContext, - output: [CxxLibraryOutput, None]) -> (dict[str, list[Provider]], list[Provider]): - # We always propagate a resource graph regardless of link style or empty output - resource_graph = create_resource_graph( - ctx = ctx, - labels = ctx.attrs.labels, - deps = cxx_attr_deps(ctx), - exported_deps = cxx_attr_exported_deps(ctx), - # Shared libraries should not propagate their resources to rdeps, - # they should only be contained in their frameworks apple_bundle. - should_propagate = output_style != LibOutputStyle("shared_lib"), - ) - if output_style != LibOutputStyle("shared_lib") or output == None: - return ({}, [resource_graph]) + extra_providers: list[Provider]) -> typing.Callable: + def get_link_style_sub_targets_impl( + output_style: LibOutputStyle, + ctx: AnalysisContext, + output: [CxxLibraryOutput, None]) -> (dict[str, list[Provider]], list[Provider]): + # We always propagate a resource graph regardless of link style or empty output + resource_graph = create_resource_graph( + ctx = ctx, + labels = ctx.attrs.labels, + deps = cxx_attr_deps(ctx), + exported_deps = cxx_attr_exported_deps(ctx), + # Shared libraries should not propagate their resources to rdeps, + # they should only be contained in their frameworks apple_bundle. 
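+        # Restated as a truth table (no new behavior, just the intent):
+        #   LibOutputStyle("archive") / ("pic_archive") -> should_propagate = True,
+        #       resources bubble up to whichever bundle ends up linking this library
+        #   LibOutputStyle("shared_lib")                -> should_propagate = False,
+        #       the dylib's own framework apple_bundle carries its resources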
+ should_propagate = output_style != LibOutputStyle("shared_lib"), + ) - min_version = get_min_deployment_version_for_node(ctx) - min_version_providers = [AppleMinDeploymentVersionInfo(version = min_version)] + if output_style != LibOutputStyle("shared_lib") or output == None: + return ({}, [resource_graph] + extra_providers) - debug_info = project_artifacts( - actions = ctx.actions, - tsets = [output.external_debug_info], - ) + min_version = get_min_deployment_version_for_node(ctx) + min_version_providers = [AppleMinDeploymentVersionInfo(version = min_version)] + + debug_info = project_artifacts( + actions = ctx.actions, + tsets = [output.external_debug_info], + ) - if get_apple_stripped_attr_value_with_default_fallback(ctx): - if False: - # TODO(nga): `output.unstripped` is never `None`. - def unknown(): - pass + if get_apple_stripped_attr_value_with_default_fallback(ctx): + if False: + # TODO(nga): `output.unstripped` is never `None`. + def unknown(): + pass - output = unknown() - expect(output.unstripped != None, "Expecting unstripped output to be non-null when stripping is enabled.") - dsym_executable = output.unstripped - else: - dsym_executable = output.default - dsym_artifact = get_apple_dsym( - ctx = ctx, - executable = dsym_executable, - debug_info = debug_info, - action_identifier = dsym_executable.short_path, - ) - debug_info_artifacts_manifest = ctx.actions.write( - "debuginfo.artifacts", - debug_info, - with_inputs = True, - ) - subtargets = { - DSYM_SUBTARGET: [DefaultInfo(default_output = dsym_artifact)], - DEBUGINFO_SUBTARGET: [DefaultInfo(default_output = debug_info_artifacts_manifest)], - } - providers = [ - AppleDebuggableInfo(dsyms = [dsym_artifact], debug_info_tset = output.external_debug_info), - resource_graph, - ] + min_version_providers + output = unknown() + expect(output.unstripped != None, "Expecting unstripped output to be non-null when stripping is enabled.") + dsym_executable = output.unstripped + else: + dsym_executable = output.default + dsym_artifact = get_apple_dsym( + ctx = ctx, + executable = dsym_executable, + debug_info = debug_info, + action_identifier = dsym_executable.short_path, + ) + debug_info_artifacts_manifest = ctx.actions.write( + "debuginfo.artifacts", + debug_info, + with_inputs = True, + ) + subtargets = { + DSYM_SUBTARGET: [DefaultInfo(default_output = dsym_artifact)], + DEBUGINFO_SUBTARGET: [DefaultInfo(default_output = debug_info_artifacts_manifest)], + } + providers = [ + AppleDebuggableInfo(dsyms = [dsym_artifact], debug_info_tset = output.external_debug_info), + resource_graph, + ] + min_version_providers + extra_providers - if output.linker_map != None: - subtargets["linker-map"] = [DefaultInfo(default_output = output.linker_map.map, other_outputs = [output.linker_map.binary])] - providers += [AppleBundleLinkerMapInfo(linker_maps = [output.linker_map.map])] + if output.linker_map != None: + subtargets["linker-map"] = [DefaultInfo(default_output = output.linker_map.map, other_outputs = [output.linker_map.binary])] + providers += [AppleBundleLinkerMapInfo(linker_maps = [output.linker_map.map])] - return (subtargets, providers) + return (subtargets, providers) -def _get_linker_flags(ctx: AnalysisContext) -> cmd_args: - return cmd_args(get_min_deployment_version_target_linker_flags(ctx)) + return get_link_style_sub_targets_impl def _xcode_populate_attributes( ctx, srcs: list[CxxSrcWithFlags], argsfiles: dict[str, CompileArgsfile], populate_xcode_attributes_func: typing.Callable, + contains_swift_sources: bool, **_kwargs) -> dict[str, 
typing.Any]: # Overwrite the product name - data = populate_xcode_attributes_func(ctx, srcs = srcs, argsfiles = argsfiles, product_name = ctx.attrs.name) + data = populate_xcode_attributes_func(ctx, srcs = srcs, argsfiles = argsfiles, product_name = ctx.attrs.name, contains_swift_sources = contains_swift_sources) return data diff --git a/prelude/apple/apple_library_types.bzl b/prelude/apple/apple_library_types.bzl new file mode 100644 index 00000000000..6d0af422dd1 --- /dev/null +++ b/prelude/apple/apple_library_types.bzl @@ -0,0 +1,20 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load( + "@prelude//:artifact_tset.bzl", + "ArtifactTSet", +) + +AppleLibraryInfo = provider( + fields = { + "labels": list[str], + "public_framework_headers": ArtifactTSet, + "swift_header": [Artifact, None], + "target": Label, + }, +) diff --git a/prelude/apple/apple_macro_layer.bzl b/prelude/apple/apple_macro_layer.bzl index ffb2735f4d7..75092f37a9c 100644 --- a/prelude/apple/apple_macro_layer.bzl +++ b/prelude/apple/apple_macro_layer.bzl @@ -5,6 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +load("@prelude//apple/user:apple_ipa_package.bzl", "make_apple_ipa_package_target") load(":apple_bundle_config.bzl", "apple_bundle_config") load(":apple_dsym_config.bzl", "apple_dsym_config") load(":apple_info_plist_substitutions_parsing.bzl", "parse_codesign_entitlements") @@ -32,6 +33,13 @@ APPLE_LINK_LIBRARIES_LOCALLY_OVERRIDE = AppleBuckConfigAttributeOverride( skip_if_false = True, ) +APPLE_LINK_LIBRARIES_REMOTELY_OVERRIDE = AppleBuckConfigAttributeOverride( + name = "link_execution_preference", + key = "link_libraries_remotely_override", + value_if_true = "remote", + skip_if_false = True, +) + APPLE_STRIPPED_DEFAULT = AppleBuckConfigAttributeOverride( name = "_stripped_default", key = "stripped_default", @@ -40,20 +48,29 @@ APPLE_STRIPPED_DEFAULT = AppleBuckConfigAttributeOverride( _APPLE_LIBRARY_LOCAL_EXECUTION_OVERRIDES = [ APPLE_LINK_LIBRARIES_LOCALLY_OVERRIDE, + APPLE_LINK_LIBRARIES_REMOTELY_OVERRIDE, AppleBuckConfigAttributeOverride(name = APPLE_ARCHIVE_OBJECTS_LOCALLY_OVERRIDE_ATTR_NAME, key = "archive_objects_locally_override"), ] -_APPLE_BINARY_LOCAL_EXECUTION_OVERRIDES = [ +# If both configs are set the last one wins +_APPLE_BINARY_EXECUTION_OVERRIDES = [ AppleBuckConfigAttributeOverride( name = "link_execution_preference", key = "link_binaries_locally_override", value_if_true = "local", skip_if_false = True, ), + AppleBuckConfigAttributeOverride( + name = "link_execution_preference", + key = "link_binaries_remotely_override", + value_if_true = "remote", + skip_if_false = True, + ), ] _APPLE_TEST_LOCAL_EXECUTION_OVERRIDES = [ APPLE_LINK_LIBRARIES_LOCALLY_OVERRIDE, + APPLE_LINK_LIBRARIES_REMOTELY_OVERRIDE, ] def apple_macro_layer_set_bool_override_attrs_from_config(overrides: list[AppleBuckConfigAttributeOverride]) -> dict[str, Select]: @@ -105,10 +122,14 @@ def apple_library_macro_impl(apple_library_rule = None, **kwargs): kwargs.update(apple_macro_layer_set_bool_override_attrs_from_config([APPLE_STRIPPED_DEFAULT])) apple_library_rule(**kwargs) +def prebuilt_apple_framework_macro_impl(prebuilt_apple_framework_rule = None, **kwargs): + 
kwargs.update(apple_macro_layer_set_bool_override_attrs_from_config([APPLE_STRIPPED_DEFAULT])) + prebuilt_apple_framework_rule(**kwargs) + def apple_binary_macro_impl(apple_binary_rule = None, apple_universal_executable = None, **kwargs): dsym_args = apple_dsym_config() kwargs.update(dsym_args) - kwargs.update(apple_macro_layer_set_bool_override_attrs_from_config(_APPLE_BINARY_LOCAL_EXECUTION_OVERRIDES)) + kwargs.update(apple_macro_layer_set_bool_override_attrs_from_config(_APPLE_BINARY_EXECUTION_OVERRIDES)) kwargs.update(apple_macro_layer_set_bool_override_attrs_from_config([APPLE_STRIPPED_DEFAULT])) original_binary_name = kwargs.pop("name") @@ -129,9 +150,10 @@ def apple_binary_macro_impl(apple_binary_rule = None, apple_universal_executable apple_binary_rule(name = binary_name, **kwargs) -def apple_package_macro_impl(apple_package_rule = None, **kwargs): +def apple_package_macro_impl(apple_package_rule = None, apple_ipa_package_rule = None, **kwargs): kwargs.update(apple_package_config()) apple_package_rule( + _ipa_package = make_apple_ipa_package_target(apple_ipa_package_rule, **kwargs), **kwargs ) diff --git a/prelude/apple/apple_native.bzl b/prelude/apple/apple_native.bzl new file mode 100644 index 00000000000..a64ccd68bd8 --- /dev/null +++ b/prelude/apple/apple_native.bzl @@ -0,0 +1,96 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load("@prelude//:prelude.bzl", "native") +load( + "@prelude//platforms/apple:constants.bzl", + "APPLE", +) +load("@prelude//platforms/apple:platforms.bzl", "config_backed_apple_target_platform", "get_default_target_platform_for_platform", "set_apple_platforms") +load("@prelude//platforms/apple/platforms_map.bzl", "APPLE_SDK_DEFAULT_PLATFORM_MAP") +load("@prelude//utils/buckconfig.bzl", "read") + +def _apple_library(**kwargs): + kwargs = _update_platforms(**kwargs) + native.apple_library(**kwargs) + +def _apple_asset_catalog(**kwargs): + kwargs = _update_platforms(**kwargs) + native.apple_asset_catalog(**kwargs) + +def _apple_binary(**kwargs): + kwargs = _update_platforms(**kwargs) + native.apple_binary(**kwargs) + +def _apple_bundle(**kwargs): + kwargs = _update_platforms(**kwargs) + native.apple_bundle(**kwargs) + +def _apple_watchos_bundle(**kwargs): + kwargs = _update_platforms(**kwargs) + native.apple_watchos_bundle(**kwargs) + +def _apple_package(**kwargs): + kwargs = _update_platforms(**kwargs) + native.apple_package(**kwargs) + +def _apple_resource(**kwargs): + kwargs = _update_platforms(**kwargs) + native.apple_resource(**kwargs) + +def _apple_test(**kwargs): + kwargs = _update_platforms(**kwargs) + native.apple_test(**kwargs) + +def _apple_xcuitest(**kwargs): + kwargs = _update_platforms(**kwargs) + native.apple_xcuitest(**kwargs) + +def _apple_xcframework(**kwargs): + kwargs = _update_platforms(**kwargs) + native.apple_xcframework(**kwargs) + +def _update_platforms(**kwargs): + platform = _get_default_platform() + + default_target_platform = kwargs.pop("default_target_platform", None) + base_config_backed_target_platform = kwargs.pop("config_backed_target_platform", None) + + if default_target_platform != None and base_config_backed_target_platform != None: + name = kwargs.get("name", "UNKNOWN_TARGET") + fail("{} has both a default_target_platform and a config_backed_target_platform, 
which is not allowed".format(name)) + + if base_config_backed_target_platform != None: + default_target_platform = config_backed_apple_target_platform(base_config_backed_target_platform, platform) + elif default_target_platform == None: + default_target_platform = get_default_target_platform_for_platform(platform) + + if default_target_platform != None: + kwargs["default_target_platform"] = default_target_platform + + kwargs = set_apple_platforms(platform, base_config_backed_target_platform, kwargs) + + return kwargs + +def _get_default_platform(): + config_platform = read("cxx", "default_platform") + if config_platform != None: + return config_platform + return APPLE_SDK_DEFAULT_PLATFORM_MAP.get(APPLE) + +apple_native = struct( + apple_asset_catalog = _apple_asset_catalog, + apple_binary = _apple_binary, + apple_bundle = _apple_bundle, + apple_watchos_bundle = _apple_watchos_bundle, + apple_library = _apple_library, + apple_package = _apple_package, + apple_resource = _apple_resource, + apple_test = _apple_test, + apple_xcuitest = _apple_xcuitest, + apple_xcframework = _apple_xcframework, +) diff --git a/prelude/apple/apple_package.bzl b/prelude/apple/apple_package.bzl index a539e44c835..c0e56d0aacb 100644 --- a/prelude/apple/apple_package.bzl +++ b/prelude/apple/apple_package.bzl @@ -5,14 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -load("@prelude//:paths.bzl", "paths") -load("@prelude//utils:arglike.bzl", "ArgLike") -load(":apple_bundle_destination.bzl", "AppleBundleDestination", "bundle_relative_path_for_destination") -load(":apple_bundle_types.bzl", "AppleBundleInfo") +load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolsInfo") load(":apple_package_config.bzl", "IpaCompressionLevel") -load(":apple_sdk.bzl", "get_apple_sdk_name") -load(":apple_swift_stdlib.bzl", "should_copy_swift_stdlib") -load(":apple_toolchain_types.bzl", "AppleToolchainInfo", "AppleToolsInfo") def apple_package_impl(ctx: AnalysisContext) -> list[Provider]: package_name = ctx.attrs.package_name if ctx.attrs.package_name else ctx.attrs.bundle.label.name @@ -44,7 +38,7 @@ def apple_package_impl(ctx: AnalysisContext) -> list[Provider]: prepackaged_validators_artifacts = _get_prepackaged_validators_outputs(ctx, contents) if prepackaged_validators_artifacts: # Add the artifacts to packaging cmd so that they are run. 
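 # Note on the pattern below: the validator artifacts never show up in the
 # argv itself, so they must ride along as hidden inputs for the action to
 # actually depend on them; a minimal sketch of the pattern (hypothetical names):
 #   cmd = cmd_args(["ipa-processor", ipa], hidden = validator_outputs)
 #   ctx.actions.run(cmd, category = "process_ipa")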
- process_ipa_cmd.hidden(prepackaged_validators_artifacts) + process_ipa_cmd.add(cmd_args(hidden = prepackaged_validators_artifacts)) sub_targets["prepackaged_validators"] = [ DefaultInfo(default_outputs = prepackaged_validators_artifacts), ] @@ -56,6 +50,13 @@ def apple_package_impl(ctx: AnalysisContext) -> list[Provider]: sub_targets = sub_targets, )] +def _get_ipa_contents(ctx: AnalysisContext) -> Artifact: + ipa_package_dep = ctx.attrs._ipa_package + default_outputs = ipa_package_dep[DefaultInfo].default_outputs + if len(default_outputs) != 1: + fail("Expect exactly one output for .ipa package") + return default_outputs[0] + def _get_default_package_cmd(ctx: AnalysisContext, unprocessed_ipa_contents: Artifact, output: OutputArtifact) -> cmd_args: apple_tools = ctx.attrs._apple_tools[AppleToolsInfo] process_ipa_cmd = cmd_args([ @@ -70,96 +71,6 @@ def _get_default_package_cmd(ctx: AnalysisContext, unprocessed_ipa_contents: Art return process_ipa_cmd -def _get_ipa_contents(ctx: AnalysisContext) -> Artifact: - bundle = ctx.attrs.bundle - app = bundle[DefaultInfo].default_outputs[0] - - contents = { - paths.join("Payload", app.basename): app, - } - - apple_bundle_info = bundle[AppleBundleInfo] - if (not apple_bundle_info.skip_copying_swift_stdlib) and should_copy_swift_stdlib(app.extension): - swift_support_path = paths.join("SwiftSupport", get_apple_sdk_name(ctx)) - contents[swift_support_path] = _get_swift_support_dir(ctx, app, apple_bundle_info) - - if apple_bundle_info.contains_watchapp: - contents["Symbols"] = _build_symbols_dir(ctx) - - return ctx.actions.copied_dir( - "__unzipped_ipa_contents__", - contents, - ) - -def _build_symbols_dir(ctx) -> Artifact: - symbols_dir = ctx.actions.declare_output("__symbols__", dir = True) - ctx.actions.run( - cmd_args(["mkdir", "-p", symbols_dir.as_output()]), - category = "watchos_symbols_dir", - ) - - return symbols_dir - -def _get_swift_support_dir(ctx, bundle_output: Artifact, bundle_info: AppleBundleInfo) -> Artifact: - stdlib_tool = ctx.attrs._apple_toolchain[AppleToolchainInfo].swift_toolchain_info.swift_stdlib_tool - sdk_name = get_apple_sdk_name(ctx) - - # .app -> app - # This is the way the input is expected. - extension = bundle_output.extension[1:] - swift_support_dir = ctx.actions.declare_output("__swift_dylibs__", dir = True) - script, _ = ctx.actions.write( - "build_swift_support.sh", - [ - cmd_args("set -euo pipefail"), - cmd_args(swift_support_dir, format = "mkdir -p {}"), - cmd_args( - [ - stdlib_tool, - # If you're debugging, you can pass the '--verbose' flag here. 
- "--copy", - "--scan-executable", - cmd_args( - [ - bundle_output, - bundle_relative_path_for_destination(AppleBundleDestination("executables"), sdk_name, extension), - bundle_info.binary_name, - ], - delimiter = "/", - ), - _get_scan_folder_args(AppleBundleDestination("plugins"), bundle_output, sdk_name, extension), - _get_scan_folder_args(AppleBundleDestination("frameworks"), bundle_output, sdk_name, extension), - _get_scan_folder_args(AppleBundleDestination("appclips"), bundle_output, sdk_name, extension), - "--destination", - swift_support_dir, - ], - delimiter = " ", - quote = "shell", - ), - ], - allow_args = True, - ) - ctx.actions.run( - cmd_args(["/bin/sh", script]).hidden([stdlib_tool, bundle_output, swift_support_dir.as_output()]), - category = "copy_swift_stdlibs", - ) - - return swift_support_dir - -def _get_scan_folder_args(dest: AppleBundleDestination, bundle_output: Artifact, sdk_name, extension) -> ArgLike: - return cmd_args( - [ - "--scan-folder", - cmd_args( - [ - bundle_output, - bundle_relative_path_for_destination(dest, sdk_name, extension), - ], - delimiter = "/", - ), - ], - ) - def _compression_level_arg(compression_level: IpaCompressionLevel) -> str: if compression_level.value == "none": return "0" diff --git a/prelude/apple/xcode_prebuild_script.bzl b/prelude/apple/apple_platforms.bzl similarity index 75% rename from prelude/apple/xcode_prebuild_script.bzl rename to prelude/apple/apple_platforms.bzl index dc56f17ef92..c9beeaa1ad9 100644 --- a/prelude/apple/xcode_prebuild_script.bzl +++ b/prelude/apple/apple_platforms.bzl @@ -5,5 +5,4 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -def xcode_prebuild_script_impl(_ctx: AnalysisContext) -> list[Provider]: - return [DefaultInfo()] +APPLE_PLATFORMS_KEY = "_apple_platforms" diff --git a/prelude/apple/apple_resource.bzl b/prelude/apple/apple_resource.bzl index 7955fe1eaa6..ac300a3e3d9 100644 --- a/prelude/apple/apple_resource.bzl +++ b/prelude/apple/apple_resource.bzl @@ -5,9 +5,33 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
+load( + "@prelude//ide_integrations:xcode.bzl", + "XCODE_DATA_SUB_TARGET", + "generate_xcode_data", +) load(":apple_resource_types.bzl", "AppleResourceDestination", "AppleResourceSpec") load(":resource_groups.bzl", "create_resource_graph") +def _artifacts(deps: (list[[Artifact, Dependency]])) -> list[Artifact]: + artifacts = [] + for dep in deps: + if isinstance(dep, Dependency): + artifacts.extend(dep[DefaultInfo].default_outputs) + else: + artifacts.append(dep) + return artifacts + +def _xcode_populate_attributes(ctx) -> dict[str, typing.Any]: + data = { + "product_name": ctx.attrs.name.replace(".", "_"), + } + artifacts = _artifacts(ctx.attrs.files) + if artifacts: + data["extra_xcode_files"] = artifacts + + return data + def apple_resource_impl(ctx: AnalysisContext) -> list[Provider]: destination = ctx.attrs.destination or "resources" resource_spec = AppleResourceSpec( @@ -18,6 +42,8 @@ def apple_resource_impl(ctx: AnalysisContext) -> list[Provider]: variant_files = ctx.attrs.variants or [], named_variant_files = ctx.attrs.named_variants or {}, codesign_files_on_copy = ctx.attrs.codesign_on_copy, + codesign_entitlements = ctx.attrs.codesign_entitlements, + codesign_flags_override = ctx.attrs.codesign_flags_override, ) # `files` can contain `apple_library()` which in turn can have `apple_resource()` deps @@ -30,10 +56,13 @@ def apple_resource_impl(ctx: AnalysisContext) -> list[Provider]: exported_deps = [], resource_spec = resource_spec, ) + xcode_data_default_info, xcode_data_info = generate_xcode_data(ctx, "apple_resource", None, _xcode_populate_attributes) + return [DefaultInfo( sub_targets = { "headers": [ DefaultInfo(default_outputs = []), ], + XCODE_DATA_SUB_TARGET: xcode_data_default_info, }, - ), graph] + ), graph, xcode_data_info] diff --git a/prelude/apple/apple_resource_bundle.bzl b/prelude/apple/apple_resource_bundle.bzl index 2ea6fae2947..e014a9bd58d 100644 --- a/prelude/apple/apple_resource_bundle.bzl +++ b/prelude/apple/apple_resource_bundle.bzl @@ -38,11 +38,12 @@ # +------>| Binary |<--------+ # +-------------------+ -load("@prelude//apple:apple_bundle_attrs.bzl", "get_apple_info_plist_build_system_identification_attrs") +load("@prelude//apple:apple_rules_impl_utility.bzl", "get_apple_info_plist_build_system_identification_attrs") _RESOURCE_BUNDLE_FIELDS = [ "asset_catalogs_compilation_options", "binary", + "copy_public_framework_headers", "default_target_platform", "deps", "extension", @@ -50,6 +51,7 @@ _RESOURCE_BUNDLE_FIELDS = [ "ibtool_module_flag", "info_plist", "info_plist_substitutions", + "module_map", "product_name", "privacy_manifest", "resource_group", @@ -78,7 +80,10 @@ def make_resource_bundle_rule(apple_resource_bundle_rule, **kwargs) -> [None, st resource_bundle_name = kwargs["name"] + "__ResourceBundle_Private" resource_bundle_kwargs = { + "compatible_with": kwargs.get("compatible_with"), + "exec_compatible_with": kwargs.get("exec_compatible_with"), "labels": ["generated"], + "target_compatible_with": kwargs.get("target_compatible_with"), "_bundle_target_name": kwargs["name"], "_compile_resources_locally_override": kwargs["_compile_resources_locally_override"], } diff --git a/prelude/apple/apple_resource_dedupe_alias.bzl b/prelude/apple/apple_resource_dedupe_alias.bzl new file mode 100644 index 00000000000..c7c7a0b845b --- /dev/null +++ b/prelude/apple/apple_resource_dedupe_alias.bzl @@ -0,0 +1,21 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. 
+# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load("@prelude//apple:apple_common.bzl", "apple_common") +load("@prelude//user:rule_spec.bzl", "RuleRegistrationSpec") +load("@prelude//apple/user/apple_resource_transition.bzl", "apple_resource_transition") + +def _apple_resource_dedupe_alias_impl(ctx: AnalysisContext) -> list[Provider]: + return ctx.attrs.actual.providers + +registration_spec = RuleRegistrationSpec( + name = "apple_resource_dedupe_alias", + impl = _apple_resource_dedupe_alias_impl, + attrs = { + "actual": attrs.transition_dep(cfg = apple_resource_transition), + } | apple_common.skip_universal_resource_dedupe_arg(), +) diff --git a/prelude/apple/apple_resource_types.bzl b/prelude/apple/apple_resource_types.bzl index 3104682fbd3..d78994036a1 100644 --- a/prelude/apple/apple_resource_types.bzl +++ b/prelude/apple/apple_resource_types.bzl @@ -28,11 +28,14 @@ AppleResourceSpec = record( # `{ "ru.lproj" : ["Localizable.strings"] }` named_variant_files = field(dict[str, list[Artifact]], {}), codesign_files_on_copy = field(bool, False), + codesign_entitlements = field(Artifact | None, None), + codesign_flags_override = field(list[str] | None, None), ) -# Used when invoking `ibtool`, `actool` and `momc` +# Used when invoking `ibtool`, `actool`, `mapc` and `momc` AppleResourceProcessingOptions = record( prefer_local = field(bool, False), + prefer_remote = field(bool, False), allow_cache_upload = field(bool, False), ) diff --git a/prelude/apple/apple_rules_impl.bzl b/prelude/apple/apple_rules_impl.bzl index bf22e48b2ce..934912cb3e3 100644 --- a/prelude/apple/apple_rules_impl.bzl +++ b/prelude/apple/apple_rules_impl.bzl @@ -5,24 +5,26 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
+load("@prelude//:attrs_validators.bzl", "ATTRS_VALIDATORS_NAME", "ATTRS_VALIDATORS_TYPE") load( "@prelude//:validation_deps.bzl", "VALIDATION_DEPS_ATTR_NAME", "VALIDATION_DEPS_ATTR_TYPE", ) +load("@prelude//apple:apple_common.bzl", "apple_common") load("@prelude//apple/swift:swift_incremental_support.bzl", "SwiftCompilationMode") load("@prelude//apple/swift:swift_toolchain.bzl", "swift_toolchain_impl") load("@prelude//apple/swift:swift_toolchain_types.bzl", "SwiftObjectFormat") -load("@prelude//apple/user:cpu_split_transition.bzl", "cpu_split_transition") +load("@prelude//apple/user:apple_xcframework.bzl", "apple_xcframework_extra_attrs") load("@prelude//cxx:headers.bzl", "CPrecompiledHeaderInfo", "HeaderMode") -load("@prelude//cxx/user:link_group_map.bzl", "link_group_map_attr") +load("@prelude//cxx:link_groups_types.bzl", "LINK_GROUP_MAP_ATTR") load("@prelude//linking:execution_preference.bzl", "link_execution_preference_attr") load("@prelude//linking:link_info.bzl", "LinkOrdering") -load("@prelude//decls/common.bzl", "Linkage") +load("@prelude//linking:types.bzl", "Linkage") load(":apple_asset_catalog.bzl", "apple_asset_catalog_impl") load(":apple_binary.bzl", "apple_binary_impl") load(":apple_bundle.bzl", "apple_bundle_impl") -load(":apple_bundle_types.bzl", "AppleBundleInfo") +load(":apple_bundle_types.bzl", "AppleBundleInfo", "ApplePackageExtension") load(":apple_core_data.bzl", "apple_core_data_impl") load(":apple_library.bzl", "AppleSharedLibraryMachOFileType", "apple_library_impl") load(":apple_package.bzl", "apple_package_impl") @@ -33,22 +35,18 @@ load( "APPLE_ARCHIVE_OBJECTS_LOCALLY_OVERRIDE_ATTR_NAME", "apple_bundle_extra_attrs", "apple_dsymutil_attrs", - "apple_test_extra_attrs", "apple_xcuitest_extra_attrs", - "get_apple_bundle_toolchain_attr", "get_apple_toolchain_attr", "get_apple_xctoolchain_attr", "get_apple_xctoolchain_bundle_id_attr", + "get_enable_library_evolution", ) load(":apple_test.bzl", "apple_test_impl") load(":apple_toolchain.bzl", "apple_toolchain_impl") load(":apple_toolchain_types.bzl", "AppleToolsInfo") -load(":apple_universal_executable.bzl", "apple_universal_executable_impl") load(":apple_xcuitest.bzl", "apple_xcuitest_impl") load(":prebuilt_apple_framework.bzl", "prebuilt_apple_framework_impl") load(":scene_kit_assets.bzl", "scene_kit_assets_impl") -load(":xcode_postbuild_script.bzl", "xcode_postbuild_script_impl") -load(":xcode_prebuild_script.bzl", "xcode_prebuild_script_impl") implemented_rules = { "apple_asset_catalog": apple_asset_catalog_impl, @@ -59,94 +57,83 @@ implemented_rules = { "apple_resource": apple_resource_impl, "apple_test": apple_test_impl, "apple_toolchain": apple_toolchain_impl, - "apple_universal_executable": apple_universal_executable_impl, "apple_xcuitest": apple_xcuitest_impl, "core_data_model": apple_core_data_impl, "prebuilt_apple_framework": prebuilt_apple_framework_impl, "scene_kit_assets": scene_kit_assets_impl, "swift_toolchain": swift_toolchain_impl, - "xcode_postbuild_script": xcode_postbuild_script_impl, - "xcode_prebuild_script": xcode_prebuild_script_impl, } _APPLE_TOOLCHAIN_ATTR = get_apple_toolchain_attr() -ApplePackageExtension = enum( - "ipa", - "pkg", - "dmg", - "zip", -) - def _apple_binary_extra_attrs(): attribs = { "binary_linker_flags": attrs.list(attrs.arg(), default = []), + "dist_thin_lto_codegen_flags": attrs.list(attrs.arg(), default = []), "enable_distributed_thinlto": attrs.bool(default = False), + "enable_library_evolution": attrs.option(attrs.bool(), default = None), "extra_xcode_sources": 
attrs.list(attrs.source(allow_directory = True), default = []), "link_execution_preference": link_execution_preference_attr(), - "link_group_map": link_group_map_attr(), + "link_group_map": LINK_GROUP_MAP_ATTR, "link_ordering": attrs.option(attrs.enum(LinkOrdering.values()), default = None), "precompiled_header": attrs.option(attrs.dep(providers = [CPrecompiledHeaderInfo]), default = None), "prefer_stripped_objects": attrs.bool(default = False), - "preferred_linkage": attrs.enum(Linkage, default = "any"), + "preferred_linkage": attrs.enum(Linkage.values(), default = "any"), + "propagated_target_sdk_version": attrs.option(attrs.string(), default = None), "sanitizer_runtime_enabled": attrs.option(attrs.bool(), default = None), "stripped": attrs.option(attrs.bool(), default = None), "swift_compilation_mode": attrs.enum(SwiftCompilationMode.values(), default = "wmo"), + "swift_package_name": attrs.option(attrs.string(), default = None), "_apple_toolchain": _APPLE_TOOLCHAIN_ATTR, "_apple_tools": attrs.exec_dep(default = "prelude//apple/tools:apple-tools", providers = [AppleToolsInfo]), "_apple_xctoolchain": get_apple_xctoolchain_attr(), "_apple_xctoolchain_bundle_id": get_apple_xctoolchain_bundle_id_attr(), + "_enable_library_evolution": get_enable_library_evolution(), "_stripped_default": attrs.bool(default = False), VALIDATION_DEPS_ATTR_NAME: VALIDATION_DEPS_ATTR_TYPE, + ATTRS_VALIDATORS_NAME: ATTRS_VALIDATORS_TYPE, } attribs.update(apple_dsymutil_attrs()) return attribs def _apple_library_extra_attrs(): attribs = { + "dist_thin_lto_codegen_flags": attrs.list(attrs.arg(), default = []), + "enable_distributed_thinlto": attrs.bool(default = False), + "enable_library_evolution": attrs.option(attrs.bool(), default = None), "extra_xcode_sources": attrs.list(attrs.source(allow_directory = True), default = []), "header_mode": attrs.option(attrs.enum(HeaderMode.values()), default = None), "link_execution_preference": link_execution_preference_attr(), - "link_group_map": link_group_map_attr(), + "link_group_map": LINK_GROUP_MAP_ATTR, "link_ordering": attrs.option(attrs.enum(LinkOrdering.values()), default = None), "precompiled_header": attrs.option(attrs.dep(providers = [CPrecompiledHeaderInfo]), default = None), - "preferred_linkage": attrs.enum(Linkage, default = "any"), - "serialize_debugging_options": attrs.bool(default = True), + "preferred_linkage": attrs.enum(Linkage.values(), default = "any"), + "propagated_target_sdk_version": attrs.option(attrs.string(), default = None), # Mach-O file type for binary when the target is built as a shared library. 
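 # For context on the attribute below: "dylib" is the regular MH_DYLIB image
 # built with `-dynamiclib`, while "bundle" yields an MH_BUNDLE image that can
 # only be loaded at runtime via dlopen(); a plausible use (hypothetical target):
 #   apple_library(name = "QuickLookPlugin", shared_library_macho_file_type = "bundle")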
"shared_library_macho_file_type": attrs.enum(AppleSharedLibraryMachOFileType.values(), default = "dylib"), "stripped": attrs.option(attrs.bool(), default = None), "supports_header_symlink_subtarget": attrs.bool(default = False), "supports_shlib_interfaces": attrs.bool(default = True), "swift_compilation_mode": attrs.enum(SwiftCompilationMode.values(), default = "wmo"), + "swift_package_name": attrs.option(attrs.string(), default = None), "use_archive": attrs.option(attrs.bool(), default = None), "_apple_toolchain": _APPLE_TOOLCHAIN_ATTR, "_apple_tools": attrs.exec_dep(default = "prelude//apple/tools:apple-tools", providers = [AppleToolsInfo]), "_apple_xctoolchain": get_apple_xctoolchain_attr(), "_apple_xctoolchain_bundle_id": get_apple_xctoolchain_bundle_id_attr(), + "_enable_library_evolution": get_enable_library_evolution(), "_stripped_default": attrs.bool(default = False), APPLE_ARCHIVE_OBJECTS_LOCALLY_OVERRIDE_ATTR_NAME: attrs.option(attrs.bool(), default = None), + ATTRS_VALIDATORS_NAME: ATTRS_VALIDATORS_TYPE, VALIDATION_DEPS_ATTR_NAME: VALIDATION_DEPS_ATTR_TYPE, } attribs.update(apple_dsymutil_attrs()) return attribs -def _apple_universal_executable_extra_attrs(): - attribs = { - "executable": attrs.split_transition_dep(cfg = cpu_split_transition), - "executable_name": attrs.option(attrs.string(), default = None), - "labels": attrs.list(attrs.string(), default = []), - "split_arch_dsym": attrs.bool(default = False), - "universal": attrs.option(attrs.bool(), default = None), - "_apple_toolchain": _APPLE_TOOLCHAIN_ATTR, - "_apple_tools": attrs.exec_dep(default = "prelude//apple/tools:apple-tools", providers = [AppleToolsInfo]), - } - attribs.update(apple_dsymutil_attrs()) - return attribs - extra_attributes = { "apple_asset_catalog": { "dirs": attrs.list(attrs.source(allow_directory = True), default = []), - }, + } | apple_common.skip_universal_resource_dedupe_arg(), "apple_binary": _apple_binary_extra_attrs(), "apple_bundle": apple_bundle_extra_attrs(), "apple_library": _apple_library_extra_attrs(), @@ -163,18 +150,18 @@ extra_attributes = { ), default = [], ), - "_apple_toolchain": get_apple_bundle_toolchain_attr(), "_apple_tools": attrs.exec_dep(default = "prelude//apple/tools:apple-tools", providers = [AppleToolsInfo]), "_ipa_compression_level": attrs.enum(IpaCompressionLevel.values()), + "_ipa_package": attrs.dep(), }, "apple_resource": { + "codesign_entitlements": attrs.option(attrs.source(), default = None), + "codesign_flags_override": attrs.option(attrs.list(attrs.string()), default = None), "codesign_on_copy": attrs.bool(default = False), "content_dirs": attrs.list(attrs.source(allow_directory = True), default = []), "dirs": attrs.list(attrs.source(allow_directory = True), default = []), "files": attrs.list(attrs.one_of(attrs.dep(), attrs.source()), default = []), - "skip_universal_resource_dedupe": attrs.bool(default = False), - }, - "apple_test": apple_test_extra_attrs(), + } | apple_common.skip_universal_resource_dedupe_arg(), "apple_toolchain": { # The Buck v1 attribute specs defines those as `attrs.source()` but # we want to properly handle any runnable tools that might have @@ -183,7 +170,7 @@ extra_attributes = { "codesign": attrs.exec_dep(providers = [RunInfo]), "codesign_allocate": attrs.exec_dep(providers = [RunInfo]), "codesign_identities_command": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), - # Controls invocations of `ibtool`, `actool` and `momc` + # Controls invocations of `ibtool`, `actool` `mapc` and `momc` "compile_resources_locally": 
attrs.bool(default = False), "copy_scene_kit_assets": attrs.exec_dep(providers = [RunInfo]), "cxx_toolchain": attrs.toolchain_dep(), @@ -191,13 +178,13 @@ extra_attributes = { "dwarfdump": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), "extra_linker_outputs": attrs.set(attrs.string(), default = []), "ibtool": attrs.exec_dep(providers = [RunInfo]), - "installer": attrs.default_only(attrs.label(default = "buck//src/com/facebook/buck/installer/apple:apple_installer")), + "installer": attrs.default_only(attrs.label(default = "fbsource//xplat/toolchains/android/sdk/src/com/facebook/buck/installer/apple:apple_installer")), "libtool": attrs.exec_dep(providers = [RunInfo]), "lipo": attrs.exec_dep(providers = [RunInfo]), - "min_version": attrs.option(attrs.string(), default = None), + "mapc": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), + "merge_index_store": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//apple/tools/index:merge_index_store")), "momc": attrs.exec_dep(providers = [RunInfo]), "objdump": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), - "odrcov": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), # A placeholder tool that can be used to set up toolchain constraints. # Useful when fat and thin toolchains share the same underlying tools via `command_alias()`, # which requires setting up separate platform-specific aliases with the correct constraints. @@ -220,21 +207,26 @@ extra_attributes = { # pass abs paths during development and using the currently selected Xcode. "_internal_sdk_path": attrs.option(attrs.string(), default = None), }, - "apple_universal_executable": _apple_universal_executable_extra_attrs(), + "apple_xcframework": apple_xcframework_extra_attrs(), "apple_xcuitest": apple_xcuitest_extra_attrs(), "core_data_model": { + "module": attrs.option(attrs.string(), default = None), "path": attrs.source(allow_directory = True), }, "prebuilt_apple_framework": { + "dsyms": attrs.list(attrs.source(allow_directory = True), default = []), "framework": attrs.option(attrs.source(allow_directory = True), default = None), - "preferred_linkage": attrs.enum(Linkage, default = "any"), + "preferred_linkage": attrs.enum(Linkage.values(), default = "any"), + "stripped": attrs.option(attrs.bool(), default = None), "_apple_toolchain": _APPLE_TOOLCHAIN_ATTR, + "_apple_tools": attrs.default_only(attrs.exec_dep(default = "prelude//apple/tools:apple-tools", providers = [AppleToolsInfo])), + "_stripped_default": attrs.bool(default = False), }, "scene_kit_assets": { "path": attrs.source(allow_directory = True), }, "swift_library": { - "preferred_linkage": attrs.enum(Linkage, default = "any"), + "preferred_linkage": attrs.enum(Linkage.values(), default = "any"), }, "swift_toolchain": { "architecture": attrs.option(attrs.string(), default = None), # TODO(T115173356): Make field non-optional diff --git a/prelude/apple/apple_rules_impl_utility.bzl b/prelude/apple/apple_rules_impl_utility.bzl index 57fe14c1b9b..1447d931786 100644 --- a/prelude/apple/apple_rules_impl_utility.bzl +++ b/prelude/apple/apple_rules_impl_utility.bzl @@ -5,19 +5,19 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree.
-load("@prelude//apple:apple_bundle_attrs.bzl", "get_apple_info_plist_build_system_identification_attrs") +load("@prelude//:attrs_validators.bzl", "ATTRS_VALIDATORS_NAME", "ATTRS_VALIDATORS_TYPE") load("@prelude//apple:apple_bundle_types.bzl", "AppleBundleResourceInfo", "AppleBundleTypeAttributeType") -load("@prelude//apple:apple_code_signing_types.bzl", "CodeSignType") +load("@prelude//apple:apple_code_signing_types.bzl", "CodeSignConfiguration", "CodeSignType") +load("@prelude//apple:apple_common.bzl", "apple_common") load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo", "AppleToolsInfo") +load("@prelude//apple:resource_groups.bzl", "RESOURCE_GROUP_MAP_ATTR") load("@prelude//apple/swift:swift_incremental_support.bzl", "SwiftCompilationMode") load("@prelude//apple/user:apple_selective_debugging.bzl", "AppleSelectiveDebuggingInfo") -load("@prelude//apple/user:apple_simulators.bzl", "apple_simulators_transition") load("@prelude//apple/user:cpu_split_transition.bzl", "cpu_split_transition") -load("@prelude//apple/user:resource_group_map.bzl", "resource_group_map_attr") load("@prelude//cxx:headers.bzl", "CPrecompiledHeaderInfo") load("@prelude//linking:execution_preference.bzl", "link_execution_preference_attr") load("@prelude//linking:link_info.bzl", "LinkOrdering") -load("@prelude//decls/common.bzl", "LinkableDepType", "Linkage") +load("@prelude//utils/clear_platform.bzl", "clear_platform_transition") def get_apple_toolchain_attr(): # FIXME: prelude// should be standalone (not refer to fbcode//) @@ -35,6 +35,32 @@ def get_apple_xctoolchain_bundle_id_attr(): # FIXME: prelude// should be standalone (not refer to fbcode//) return attrs.toolchain_dep(default = "fbcode//buck2/platform/toolchain:apple-xctoolchain-bundle-id") +def get_enable_library_evolution(): + return attrs.bool(default = select({ + "DEFAULT": False, + "config//features/apple:swift_library_evolution_enabled": True, + })) + +def _get_enable_dsym_uses_parallel_linker(): + return attrs.bool(default = select({ + "DEFAULT": False, + "config//features/apple:dsym_uses_parallel_linker_enabled": True, + })) + +def _strict_provisioning_profile_search_default_attr(): + default_value = (read_root_config("apple", "strict_provisioning_profile_search", "true").lower() == "true") + return attrs.bool(default = select({ + "DEFAULT": default_value, + "config//features/apple:strict_provisioning_profile_search_enabled": True, + })) + +def _fast_adhoc_signing_enabled_default_attr(): + return attrs.bool(default = select({ + "DEFAULT": True, + "config//features/apple:fast_adhoc_signing_disabled": False, + "config//features/apple:fast_adhoc_signing_enabled": True, + })) + APPLE_ARCHIVE_OBJECTS_LOCALLY_OVERRIDE_ATTR_NAME = "_archive_objects_locally_override" APPLE_USE_ENTITLEMENTS_WHEN_ADHOC_CODE_SIGNING_CONFIG_OVERRIDE_ATTR_NAME = "_use_entitlements_when_adhoc_code_signing" APPLE_USE_ENTITLEMENTS_WHEN_ADHOC_CODE_SIGNING_ATTR_NAME = "use_entitlements_when_adhoc_code_signing" @@ -46,29 +72,43 @@ APPLE_VALIDATION_DEPS_ATTR_TYPE = attrs.set(attrs.dep(), sorted = True, default def apple_dsymutil_attrs(): return { + "dsym_uses_parallel_linker": _get_enable_dsym_uses_parallel_linker(), "_dsymutil_extra_flags": attrs.list(attrs.string()), + "_dsymutil_verify_dwarf": attrs.string(), + } + +def get_apple_info_plist_build_system_identification_attrs(): + return { + "info_plist_identify_build_system": attrs.option(attrs.bool(), default = None), + "_info_plist_identify_build_system_default": attrs.bool(default = False), } def 
_apple_bundle_like_common_attrs(): # `apple_bundle()` and `apple_test()` share a common set of extra attrs attribs = { "codesign_type": attrs.option(attrs.enum(CodeSignType.values()), default = None), + "fast_adhoc_signing_enabled": attrs.option(attrs.bool(), default = None), + "provisioning_profile_filter": attrs.option(attrs.string(), default = None), + "strict_provisioning_profile_search": attrs.option(attrs.bool(), default = None), + "versioned_macos_bundle": attrs.bool(default = False), "_apple_tools": attrs.exec_dep(default = "prelude//apple/tools:apple-tools", providers = [AppleToolsInfo]), "_apple_xctoolchain": get_apple_xctoolchain_attr(), "_apple_xctoolchain_bundle_id": get_apple_xctoolchain_bundle_id_attr(), "_bundling_cache_buster": attrs.option(attrs.string(), default = None), "_bundling_log_file_enabled": attrs.bool(default = False), "_bundling_log_file_level": attrs.option(attrs.string(), default = None), + "_code_signing_configuration": attrs.option(attrs.enum(CodeSignConfiguration.values()), default = None), + "_codesign_identities_command_override": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), "_codesign_type": attrs.option(attrs.enum(CodeSignType.values()), default = None), "_compile_resources_locally_override": attrs.option(attrs.bool(), default = None), - "_dry_run_code_signing": attrs.bool(default = False), - "_fast_adhoc_signing_enabled": attrs.bool(default = False), + "_fast_adhoc_signing_enabled_default": _fast_adhoc_signing_enabled_default_attr(), "_fast_provisioning_profile_parsing_enabled": attrs.bool(default = False), "_incremental_bundling_enabled": attrs.bool(default = False), "_profile_bundling_enabled": attrs.bool(default = False), # FIXME: prelude// should be standalone (not refer to fbsource//) "_provisioning_profiles": attrs.dep(default = "fbsource//xplat/buck2/platform/apple:provisioning_profiles"), "_resource_bundle": attrs.option(attrs.dep(providers = [AppleBundleResourceInfo]), default = None), + "_strict_provisioning_profile_search_default": _strict_provisioning_profile_search_default_attr(), APPLE_USE_ENTITLEMENTS_WHEN_ADHOC_CODE_SIGNING_CONFIG_OVERRIDE_ATTR_NAME: attrs.option(attrs.bool(), default = None), APPLE_USE_ENTITLEMENTS_WHEN_ADHOC_CODE_SIGNING_ATTR_NAME: attrs.bool(default = False), APPLE_EMBED_PROVISIONING_PROFILE_WHEN_ADHOC_CODE_SIGNING_CONFIG_OVERRIDE_ATTR_NAME: attrs.option(attrs.bool(), default = None), @@ -84,19 +124,17 @@ def apple_test_extra_attrs(): # wrap this test library into an `apple_bundle`. Because of this, `apple_test` has attributes # from both `apple_library` and `apple_bundle`. attribs = { + ATTRS_VALIDATORS_NAME: ATTRS_VALIDATORS_TYPE, # Expected by `apple_bundle`, for `apple_test` this field is always None. "binary": attrs.option(attrs.dep(), default = None), + "enable_library_evolution": attrs.option(attrs.bool(), default = None), # The resulting test bundle should have .xctest extension. "extension": attrs.string(), "extra_xcode_sources": attrs.list(attrs.source(allow_directory = True), default = []), "link_execution_preference": link_execution_preference_attr(), "link_ordering": attrs.option(attrs.enum(LinkOrdering.values()), default = None), - # Used to create the shared test library. Any library deps whose `preferred_linkage` isn't "shared" will - # be treated as "static" deps and linked into the shared test library. 
- "link_style": attrs.enum(LinkableDepType, default = "static"), "precompiled_header": attrs.option(attrs.dep(providers = [CPrecompiledHeaderInfo]), default = None), - # The test source code and lib dependencies should be built into a shared library. - "preferred_linkage": attrs.enum(Linkage, default = "shared"), + "propagated_target_sdk_version": attrs.option(attrs.string(), default = None), # Expected by `apple_bundle`, for `apple_test` this field is always None. "resource_group": attrs.option(attrs.string(), default = None), # Expected by `apple_bundle`, for `apple_test` this field is always None. @@ -104,11 +142,20 @@ def apple_test_extra_attrs(): "sanitizer_runtime_enabled": attrs.option(attrs.bool(), default = None), "stripped": attrs.bool(default = False), "swift_compilation_mode": attrs.enum(SwiftCompilationMode.values(), default = "wmo"), - "use_m1_simulator": attrs.bool(default = False), + "swift_package_name": attrs.option(attrs.string(), default = None), + "test_re_capabilities": attrs.option(attrs.dict(key = attrs.string(), value = attrs.string(), sorted = False), default = None, doc = """ + An optional dictionary with the RE capabilities for the test execution. + Overrides a default selection mechanism. + """), + "test_re_use_case": attrs.option(attrs.string(), default = None, doc = """ + An optional name of the RE use case for the test execution. + Overrides a default selection mechanism. + """), "_apple_toolchain": get_apple_toolchain_attr(), - "_ios_booted_simulator": attrs.transition_dep(cfg = apple_simulators_transition, default = "fbsource//xplat/buck2/platform/apple:ios_booted_simulator", providers = [LocalResourceInfo]), - "_ios_unbooted_simulator": attrs.transition_dep(cfg = apple_simulators_transition, default = "fbsource//xplat/buck2/platform/apple:ios_unbooted_simulator", providers = [LocalResourceInfo]), - "_macos_idb_companion": attrs.transition_dep(cfg = apple_simulators_transition, default = "fbsource//xplat/buck2/platform/apple:macos_idb_companion", providers = [LocalResourceInfo]), + "_enable_library_evolution": get_enable_library_evolution(), + "_ios_booted_simulator": attrs.transition_dep(cfg = clear_platform_transition, default = "fbsource//xplat/buck2/platform/apple:ios_booted_simulator", providers = [LocalResourceInfo]), + "_ios_unbooted_simulator": attrs.transition_dep(cfg = clear_platform_transition, default = "fbsource//xplat/buck2/platform/apple:ios_unbooted_simulator", providers = [LocalResourceInfo]), + "_macos_idb_companion": attrs.transition_dep(cfg = clear_platform_transition, default = "fbsource//xplat/buck2/platform/apple:macos_idb_companion", providers = [LocalResourceInfo]), } attribs.update(_apple_bundle_like_common_attrs()) return attribs @@ -118,6 +165,7 @@ def apple_xcuitest_extra_attrs(): # This is ignored, but required for info plist processing. "binary": attrs.option(attrs.source(), default = None), "codesign_identity": attrs.option(attrs.string(), default = None), + "enable_library_evolution": attrs.option(attrs.bool(), default = None), "entitlements_file": attrs.option(attrs.source(), default = None), "extension": attrs.default_only(attrs.string(default = "app")), "incremental_bundling_enabled": attrs.bool(default = False), @@ -127,22 +175,36 @@ def apple_xcuitest_extra_attrs(): # The test bundle to package in the UI test runner app. 
"test_bundle": attrs.dep(), "_apple_toolchain": get_apple_toolchain_attr(), + "_enable_library_evolution": get_enable_library_evolution(), } attribs.update(_apple_bundle_like_common_attrs()) attribs.pop("_dsymutil_extra_flags", None) + attribs.pop("_dsymutil_verify_dwarf", None) return attribs +def _embed_xctest_frameworks_default_value(): + return select({ + "DEFAULT": False, + # Xcode copies XCTest frameworks to test host apps, required when the + # selected Xcode version != Xcode version used to build an app under test + "config//marker/apple/constraints:embed_xctest_frameworks_enabled": True, + }) + def apple_bundle_extra_attrs(): attribs = { "binary": attrs.option(attrs.split_transition_dep(cfg = cpu_split_transition), default = None), "bundle_type": attrs.option(attrs.enum(AppleBundleTypeAttributeType.values()), default = None), - "resource_group_map": resource_group_map_attr(), + "copy_public_framework_headers": attrs.option(attrs.bool(), default = None), + "embed_xctest_frameworks": attrs.bool(default = _embed_xctest_frameworks_default_value()), + "module_map": attrs.option(attrs.source(), default = None), + "propagated_target_sdk_version": attrs.option(attrs.string(), default = None), + "resource_group_map": RESOURCE_GROUP_MAP_ATTR, "selective_debugging": attrs.option(attrs.dep(providers = [AppleSelectiveDebuggingInfo]), default = None), "split_arch_dsym": attrs.bool(default = False), "universal": attrs.option(attrs.bool(), default = None), "_apple_toolchain": get_apple_bundle_toolchain_attr(), "_codesign_entitlements": attrs.option(attrs.source(), default = None), - } + } | apple_common.debug_artifacts_validators_arg() attribs.update(_apple_bundle_like_common_attrs()) return attribs diff --git a/prelude/apple/apple_static_archive.bzl b/prelude/apple/apple_static_archive.bzl new file mode 100644 index 00000000000..e43c4575caa --- /dev/null +++ b/prelude/apple/apple_static_archive.bzl @@ -0,0 +1,119 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
+ +load("@prelude//:artifact_tset.bzl", "make_artifact_tset") +load("@prelude//:validation_deps.bzl", "VALIDATION_DEPS_ATTR_NAME", "VALIDATION_DEPS_ATTR_TYPE", "get_validation_deps_outputs") +load("@prelude//apple:apple_library.bzl", "AppleLibraryForDistributionInfo") +load("@prelude//apple:apple_library_types.bzl", "AppleLibraryInfo") +load("@prelude//apple:apple_rules_impl_utility.bzl", "get_apple_toolchain_attr") +load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo") +load("@prelude//linking:link_info.bzl", "LinkStrategy", "get_link_args_for_strategy", "unpack_link_args") +load("@prelude//linking:linkables.bzl", "linkables") +load("@prelude//user:rule_spec.bzl", "RuleRegistrationSpec") +load("@prelude//utils:arglike.bzl", "ArgLike") + +def _apple_static_archive_impl(ctx: AnalysisContext) -> list[Provider]: + libtool = ctx.attrs._apple_toolchain[AppleToolchainInfo].libtool + archive_name = ctx.attrs.name if ctx.attrs.archive_name == None else ctx.attrs.archive_name + output = ctx.actions.declare_output(archive_name) + + artifacts = _get_static_link_args(ctx) + validation_deps_outputs = get_validation_deps_outputs(ctx) + + #TODO(T193127271): Support thin archives + cmd = cmd_args([libtool, "-static", "-o", output.as_output(), artifacts], hidden = validation_deps_outputs or []) + ctx.actions.run(cmd, category = "libtool", identifier = output.short_path) + + providers = [DefaultInfo(default_output = output), _get_apple_library_info(ctx)] + _get_apple_library_for_distribution_info(ctx) + + return providers + +def _get_apple_library_for_distribution_info(ctx: AnalysisContext) -> list[AppleLibraryForDistributionInfo]: + if ctx.attrs.distribution_flat_dep != None: + apple_library_for_distribution = ctx.attrs.distribution_flat_dep.get(AppleLibraryForDistributionInfo) + if apple_library_for_distribution != None: + return [apple_library_for_distribution] + return [] + +def _get_apple_library_info(ctx: AnalysisContext) -> AppleLibraryInfo: + all_flat_deps = filter(None, ctx.attrs.flat_deps + [ctx.attrs.distribution_flat_dep]) + flat_apple_library_infos = filter(None, [dep.get(AppleLibraryInfo) for dep in all_flat_deps]) + flat_public_framework_headers = [] + for apple_library_info in flat_apple_library_infos: + tset = apple_library_info.public_framework_headers._tset + if tset != None: + for headers in tset.value: + flat_public_framework_headers += headers.artifacts + + flat_header_tset = make_artifact_tset( + actions = ctx.actions, + label = ctx.label, + artifacts = flat_public_framework_headers, + ) + + apple_library_infos = filter(None, [dep.get(AppleLibraryInfo) for dep in ctx.attrs.deps]) + public_framework_header_tset = make_artifact_tset( + actions = ctx.actions, + label = ctx.label, + children = [apple_library.public_framework_headers for apple_library in apple_library_infos] + [flat_header_tset], + ) + + swift_header = None + if ctx.attrs.distribution_flat_dep != None: + distribution_flat_dep_apple_library_info = ctx.attrs.distribution_flat_dep.get(AppleLibraryInfo) + if distribution_flat_dep_apple_library_info: + swift_header = distribution_flat_dep_apple_library_info.swift_header + + return AppleLibraryInfo( + public_framework_headers = public_framework_header_tset, + swift_header = swift_header, + target = ctx.label, + labels = ctx.attrs.labels, + ) + +def _get_static_link_args(ctx: AnalysisContext) -> list[ArgLike]: + args = [] + + for dep in ctx.attrs.flat_deps: + default_info = dep.get(DefaultInfo) + if default_info == None: + continue + default_outputs = 
default_info.default_outputs + if len(default_outputs) > 0: + args.append(default_outputs[0]) + + if ctx.attrs.distribution_flat_dep: + default_info = ctx.attrs.distribution_flat_dep.get(DefaultInfo) + if default_info != None: + default_outputs = default_info.default_outputs + if len(default_outputs) > 0: + args.append(default_outputs[0]) + + args = dedupe(args) + + transitive_link_args = get_link_args_for_strategy( + ctx, + [x.merged_link_info for x in linkables(ctx.attrs.deps)], + LinkStrategy("static"), + ) + args.append(unpack_link_args(transitive_link_args)) + + return args + +registration_spec = RuleRegistrationSpec( + name = "apple_static_archive", + impl = _apple_static_archive_impl, + attrs = { + "archive_name": attrs.option(attrs.string(), default = None), + "deps": attrs.list(attrs.dep(), default = []), + "distribution_flat_dep": attrs.option(attrs.dep(), default = None), + "flat_deps": attrs.list(attrs.dep(), default = []), + "labels": attrs.list(attrs.string(), default = []), + VALIDATION_DEPS_ATTR_NAME: VALIDATION_DEPS_ATTR_TYPE, + "_apple_toolchain": get_apple_toolchain_attr(), + }, +) diff --git a/prelude/apple/apple_target_sdk_version.bzl b/prelude/apple/apple_target_sdk_version.bzl index 4b1290384ef..8229b948ff1 100644 --- a/prelude/apple/apple_target_sdk_version.bzl +++ b/prelude/apple/apple_target_sdk_version.bzl @@ -5,76 +5,69 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo") -load("@prelude//cxx:preprocessor.bzl", "CPreprocessor", "CPreprocessorArgs") -load(":apple_sdk.bzl", "get_apple_sdk_name") +load("@prelude//cxx:target_sdk_version.bzl", "get_target_sdk_version") -# TODO(T112099448): In the future, the min version flag should live on the apple_toolchain() -# TODO(T113776898): Switch to -mtargetos= flag which should live on the apple_toolchain() -_APPLE_MIN_VERSION_FLAG_SDK_MAP = { - "iphoneos": "-mios-version-min", - "iphonesimulator": "-mios-simulator-version-min", - "maccatalyst": "-mios-version-min", # Catalyst uses iOS min version flags - "macosx": "-mmacosx-version-min", - "watchos": "-mwatchos-version-min", - "watchsimulator": "-mwatchsimulator-version-min", +_MACCATALYST_IOS_TO_MACOS_VERSION_MAP = { + "13.0": "10.15", # Catalina + "13.1": "10.15", + "13.2": "10.15.1", + "13.3": "10.15.2", + "13.4": "10.15.4", + "13.5": "10.15.5", + "13.6": "10.15.5", # Xcode reported 10.15 + "14.0": "11.0", # Big Sur + "14.1": "11.0", + "14.2": "11.0", + "14.3": "11.1", + "14.4": "11.2", + "14.5": "11.3", + "14.6": "11.4", + "14.7": "11.5", + "15.0": "12.0", # Monterey + "15.1": "12.0", # Xcode reported 10.15 + "15.2": "12.1", + "15.3": "12.2", + "15.4": "12.3", + "15.5": "12.4", + "15.6": "12.5", + "16.0": "13.0", # Ventura + "16.1": "13.0", + "16.2": "13.1", + "16.3": "13.2", + "16.4": "13.3", + "16.5": "13.4", + "16.6": "13.5", + "17.0": "14.0", # Sonoma + "17.1": "14.1", + "17.2": "14.2", + "17.3": "14.3", + "17.4": "14.4", + "17.5": "14.5", + "18.0": "15.0", + "18.1": "15.1", } -# Returns the target SDK version for apple_(binary|library) and uses -# apple_toolchain() min version as a fallback. This is the central place -# where the version for a particular node is defined, no other places -# should be accessing `attrs.target_sdk_version` or `attrs.min_version`. 
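# Usage sketch for the `apple_static_archive` rule registered above (target
# names are hypothetical): `flat_deps` are archived directly from their default
# outputs, while `deps` contribute their transitive static link inputs.
#
#     apple_static_archive(
#         name = "Combined",
#         archive_name = "libCombined.a",
#         flat_deps = [":precompiled_objects"],
#         deps = [":some_apple_library"],
#     )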
-def get_min_deployment_version_for_node(ctx: AnalysisContext) -> [None, str]: - toolchain_min_version = ctx.attrs._apple_toolchain[AppleToolchainInfo].min_version - if toolchain_min_version == "": - toolchain_min_version = None - return getattr(ctx.attrs, "target_sdk_version", None) or toolchain_min_version - -# Returns the min deployment flag to pass to the compiler + linker -def _get_min_deployment_version_target_flag(ctx: AnalysisContext) -> [None, str]: - target_sdk_version = get_min_deployment_version_for_node(ctx) - if target_sdk_version == None: - return None - - sdk_name = get_apple_sdk_name(ctx) - min_version_flag = _APPLE_MIN_VERSION_FLAG_SDK_MAP.get(sdk_name) - if min_version_flag == None: - fail("Could not determine min version flag for SDK {}".format(sdk_name)) +_SDK_NAME_TO_PLATFORM_NAME_OVERRIDE_MAP = { + "maccatalyst": "macosx", +} - return "{}={}".format(min_version_flag, target_sdk_version) +def get_platform_version_for_sdk_version(sdk_name: str, sdk_version: str) -> str: + if sdk_name == "maccatalyst": + macos_version = _MACCATALYST_IOS_TO_MACOS_VERSION_MAP.get(sdk_version, None) + if macos_version == None: + fail("No macos version for maccatalyst version {}".format(sdk_version)) + return macos_version -# There are two main ways in which we can pass target SDK version: -# - versioned target triple -# - unversioned target triple + version flag -# -# A versioned target triple overrides any version flags and requires -# additional flags to disable the warning/error (`-Woverriding-t-option`), -# so we prefer to use an unversioned target triple + version flag. -# -# Furthermore, we want to ensure that there's _exactly one_ version flag -# on a compiler/link line. This makes debugging easier and avoids issues -# with multiple layers each adding/overriding target SDK. It also makes -# it easier to switch to versioned target triple. -# -# There are exactly two ways in which to specify the target SDK: -# - apple_toolchain.min_version sets the default value -# - apple_(binary|library).target_sdk_version sets the per-target value -# -# apple_toolchain() rules should _never_ add any version flags because -# the rule does _not_ know whether a particular target will request a -# non-default value. Otherwise, we end up with multiple version flags, -# one added by the toolchain and then additional overrides by targets. + return sdk_version -def get_min_deployment_version_target_linker_flags(ctx: AnalysisContext) -> list[str]: - min_version_flag = _get_min_deployment_version_target_flag(ctx) - return [min_version_flag] if min_version_flag != None else [] +def get_platform_name_for_sdk(sdk_name: str) -> str: + return _SDK_NAME_TO_PLATFORM_NAME_OVERRIDE_MAP.get(sdk_name, sdk_name) -def get_min_deployment_version_target_preprocessor_flags(ctx: AnalysisContext) -> list[CPreprocessor]: - min_version_flag = _get_min_deployment_version_target_flag(ctx) - if min_version_flag == None: - return [] +# Returns the target_sdk_version specified for this build, falling +# back to the toolchain version when unset. 
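# Worked examples for the two helpers above, using entries from the version map:
#
#     get_platform_version_for_sdk_version("maccatalyst", "17.4")  # "14.4"
#     get_platform_version_for_sdk_version("iphoneos", "17.4")     # "17.4", non-Catalyst versions pass through
#     get_platform_name_for_sdk("maccatalyst")                     # "macosx"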
+def get_min_deployment_version_for_node(ctx: AnalysisContext) -> str: + version = get_target_sdk_version(ctx) + if version == None: + fail("No target_sdk_version set on target or toolchain") - args = cmd_args(min_version_flag) - return [CPreprocessor( - relative_args = CPreprocessorArgs(args = [args]), - )] + return version diff --git a/prelude/apple/apple_test.bzl b/prelude/apple/apple_test.bzl index 251283a0091..4c87390412d 100644 --- a/prelude/apple/apple_test.bzl +++ b/prelude/apple/apple_test.bzl @@ -5,9 +5,13 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -load("@prelude//:paths.bzl", "paths") +load( + "@prelude//:artifact_tset.bzl", + "project_artifacts", +) load("@prelude//apple:apple_library.bzl", "AppleLibraryAdditionalParams", "apple_library_rule_constructor_params_and_swift_providers") load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo") +load("@prelude//apple:apple_xctest_frameworks_utility.bzl", "get_xctest_frameworks_bundle_parts") # @oss-disable: load("@prelude//apple/meta_only:apple_test_re_capabilities.bzl", "ios_test_re_capabilities", "macos_test_re_capabilities") # @oss-disable: load("@prelude//apple/meta_only:apple_test_re_use_case.bzl", "apple_test_re_use_case") load("@prelude//apple/swift:swift_compilation.bzl", "get_swift_anonymous_targets", "uses_explicit_modules") @@ -15,16 +19,17 @@ load( "@prelude//cxx:argsfiles.bzl", "CompileArgsfile", # @unused Used as a type ) +load("@prelude//cxx:cxx_library.bzl", "cxx_library_parameterized") load( - "@prelude//cxx:compile.bzl", + "@prelude//cxx:cxx_sources.bzl", "CxxSrcWithFlags", # @unused Used as a type ) -load("@prelude//cxx:cxx_library.bzl", "cxx_library_parameterized") load("@prelude//cxx:cxx_types.bzl", "CxxRuleProviderParams", "CxxRuleSubTargetParams") load( "@prelude//cxx:linker.bzl", "SharedLibraryFlagOverrides", ) +load("@prelude//ide_integrations:xcode.bzl", "XcodeDataInfoKeys") load( "@prelude//utils:dicts.bzl", "flatten_x", ) @@ -42,16 +47,19 @@ load( ":apple_sdk_metadata.bzl", "MacOSXSdkMetadata", ) -load(":debug.bzl", "DEBUGINFO_SUBTARGET") +load(":debug.bzl", "AppleDebuggableInfo") load(":xcode.bzl", "apple_populate_xcode_attributes") load(":xctest_swift_support.bzl", "XCTestSwiftSupportInfo") +_XCTOOLCHAIN_SUB_TARGET = "xctoolchain" + def apple_test_impl(ctx: AnalysisContext) -> [list[Provider], Promise]: def get_apple_test_providers(deps_providers) -> list[Provider]: xctest_bundle = bundle_output(ctx) test_host_app_bundle = _get_test_host_app_bundle(ctx) test_host_app_binary = _get_test_host_app_binary(ctx, test_host_app_bundle) + ui_test_target_app_bundle = _get_ui_test_target_app_bundle(ctx) objc_bridging_header_flags = [ # Disable bridging header -> PCH compilation to mitigate an issue in Xcode 13 beta. @@ -66,7 +74,7 @@ def apple_test_impl(ctx: AnalysisContext) -> [list[Provider], Promise]: # any xctests altogether, provided the test dylib is adhoc signed shared_library_flags += entitlements_link_flags(ctx) - # The linker will incluide adhoc signature for ARM64 by default, lets + # The linker will include an adhoc signature for ARM64 by default, let's # ensure we always have an adhoc signature regardless of arch/linker logic.
shared_library_flags += ["-Wl,-adhoc_codesign"] @@ -92,7 +100,7 @@ def apple_test_impl(ctx: AnalysisContext) -> [list[Provider], Promise]: compilation_database = True, default = False, linkable_graph = False, - link_style_outputs = False, + link_style_outputs = True, merged_native_link_info = False, omnibus_root = False, preprocessors = False, @@ -111,7 +119,11 @@ def apple_test_impl(ctx: AnalysisContext) -> [list[Provider], Promise]: ) cxx_library_output = cxx_library_parameterized(ctx, constructor_params) - test_binary_output = ctx.actions.declare_output(get_product_name(ctx)) + + # Locate the temporary binary that is bundled into the xctest in a binaries directory. When Xcode loads the test out of the target's output dir, + # it will utilize a binary with the test name from the output dir instead of the xctest bundle, which then results in paths to test resources + # being incorrect. Locating the temporary binary elsewhere works around this issue. + test_binary_output = ctx.actions.declare_output("__binaries__", get_product_name(ctx)) # Rename in order to generate dSYM with correct binary name (dsymutil doesn't provide a way to control binary name in output dSYM bundle). test_binary = ctx.actions.copy_file(test_binary_output, cxx_library_output.default_output.default) @@ -120,13 +132,23 @@ def apple_test_impl(ctx: AnalysisContext) -> [list[Provider], Promise]: part_list_output = get_apple_bundle_part_list(ctx, AppleBundlePartListConstructorParams(binaries = [binary_part])) xctest_swift_support_needed = None + debug_info = None + cxx_providers = [] for p in cxx_library_output.providers: if isinstance(p, XCTestSwiftSupportInfo): xctest_swift_support_needed = p.support_needed - break + elif isinstance(p, AppleDebuggableInfo): + debug_info = project_artifacts(ctx.actions, [p.debug_info_tset]) + elif isinstance(p, ValidationInfo): + cxx_providers.append(p) expect(xctest_swift_support_needed != None, "Expected `XCTestSwiftSupportInfo` provider to be present") + expect(debug_info != None, "Expected `AppleDebuggableInfo` provider to be present") - bundle_parts = part_list_output.parts + _get_xctest_framework(ctx, xctest_swift_support_needed) + bundle_parts = part_list_output.parts + if not ctx.attrs.embed_xctest_frameworks_in_test_host_app: + # The XCTest frameworks should only be embedded in a single place, + # either in the test host (as per Xcode) or in the test itself + bundle_parts += get_xctest_frameworks_bundle_parts(ctx, xctest_swift_support_needed) for sanitizer_runtime_dylib in cxx_library_output.sanitizer_runtime_files: frameworks_destination = AppleBundleDestination("frameworks") @@ -141,7 +163,7 @@ def apple_test_impl(ctx: AnalysisContext) -> [list[Provider], Promise]: primary_binary_rel_path = get_apple_bundle_part_relative_destination_path(ctx, binary_part) swift_stdlib_args = SwiftStdlibArguments(primary_binary_rel_path = primary_binary_rel_path) - sub_targets = assemble_bundle( + bundle_result = assemble_bundle( ctx, xctest_bundle, bundle_parts, @@ -150,44 +172,48 @@ def apple_test_impl(ctx: AnalysisContext) -> [list[Provider], Promise]: # Adhoc signing can be skipped because the test executable is adhoc signed # + includes any entitlements if present.
skip_adhoc_signing = True, + incremental_bundling_override = False, ) - + sub_targets = bundle_result.sub_targets sub_targets.update(cxx_library_output.sub_targets) - (debuginfo,) = sub_targets[DEBUGINFO_SUBTARGET] + dsym_artifact = get_apple_dsym( ctx = ctx, executable = test_binary, - debug_info = debuginfo.default_outputs, + debug_info = debug_info, action_identifier = "generate_apple_test_dsym", output_path_override = get_bundle_dir_name(ctx) + ".dSYM", ) sub_targets[DSYM_SUBTARGET] = [DefaultInfo(default_output = dsym_artifact)] - # If the test has a test host, add a subtarget to build the test host app bundle. + # If the test has a test host and a ui test target, add the subtargets to build the app bundles. sub_targets["test-host"] = [DefaultInfo(default_output = test_host_app_bundle)] if test_host_app_bundle else [DefaultInfo()] + sub_targets["ui-test-target"] = [DefaultInfo(default_output = ui_test_target_app_bundle)] if ui_test_target_app_bundle else [DefaultInfo()] sub_targets[DWARF_AND_DSYM_SUBTARGET] = [ - DefaultInfo(default_output = xctest_bundle, other_outputs = [dsym_artifact]), - _get_test_info(ctx, xctest_bundle, test_host_app_bundle, dsym_artifact), + DefaultInfo(default_output = xctest_bundle, other_outputs = [dsym_artifact], sub_targets = {_XCTOOLCHAIN_SUB_TARGET: ctx.attrs._apple_xctoolchain.providers}), + _get_test_info(ctx, xctest_bundle, test_host_app_bundle, dsym_artifact, ui_test_target_app_bundle), ] + sub_targets[_XCTOOLCHAIN_SUB_TARGET] = ctx.attrs._apple_xctoolchain.providers + return [ DefaultInfo(default_output = xctest_bundle, sub_targets = sub_targets), - _get_test_info(ctx, xctest_bundle, test_host_app_bundle), + _get_test_info(ctx, xctest_bundle, test_host_app_bundle, ui_test_target_app_bundle = ui_test_target_app_bundle), cxx_library_output.xcode_data_info, cxx_library_output.cxx_compilationdb_info, - ] + ] + bundle_result.providers + cxx_providers if uses_explicit_modules(ctx): return get_swift_anonymous_targets(ctx, get_apple_test_providers) else: return get_apple_test_providers([]) -def _get_test_info(ctx: AnalysisContext, xctest_bundle: Artifact, test_host_app_bundle: [Artifact, None], dsym_artifact: [Artifact, None] = None) -> Provider: +def _get_test_info(ctx: AnalysisContext, xctest_bundle: Artifact, test_host_app_bundle: Artifact | None, dsym_artifact: Artifact | None = None, ui_test_target_app_bundle: Artifact | None = None) -> Provider: # When interacting with Tpx, we just pass our various inputs via env vars, # since Tpx basically wants structured output for this.
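# The resulting env ends up shaped like
#     {"XCTEST_BUNDLE": ..., "HOST_APP_BUNDLE": ..., "TARGET_APP_BUNDLE": ...}
# where the host and target entries are only present for hosted tests and UI
# tests respectively, as the code below shows.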
- xctest_bundle = cmd_args(xctest_bundle).hidden(dsym_artifact) if dsym_artifact else xctest_bundle + xctest_bundle = cmd_args(xctest_bundle, hidden = dsym_artifact) if dsym_artifact else xctest_bundle env = {"XCTEST_BUNDLE": xctest_bundle} if test_host_app_bundle == None: @@ -196,20 +222,27 @@ def _get_test_info(ctx: AnalysisContext, xctest_bundle: Artifact, test_host_app_ env["HOST_APP_BUNDLE"] = test_host_app_bundle tpx_label = "tpx:apple_test:buck2:appTest" + if ui_test_target_app_bundle != None: + env["TARGET_APP_BUNDLE"] = ui_test_target_app_bundle + tpx_label = "tpx:apple_test:buck2:uiTest" + labels = ctx.attrs.labels + [tpx_label] labels.append(tpx_label) sdk_name = get_apple_sdk_name(ctx) - if sdk_name == MacOSXSdkMetadata.name: + if ctx.attrs.test_re_capabilities: + remote_execution_properties = ctx.attrs.test_re_capabilities + + elif sdk_name == MacOSXSdkMetadata.name: # @oss-disable: remote_execution_properties = macos_test_re_capabilities() remote_execution_properties = None # @oss-enable else: # @oss-disable: requires_ios_booted_simulator = ctx.attrs.test_host_app != None or ctx.attrs.ui_test_target_app != None - # @oss-disable: remote_execution_properties = ios_test_re_capabilities(use_unbooted_simulator = not requires_ios_booted_simulator, use_m1_simulator = ctx.attrs.use_m1_simulator) + # @oss-disable: remote_execution_properties = ios_test_re_capabilities(use_unbooted_simulator = not requires_ios_booted_simulator) remote_execution_properties = None # @oss-enable - # @oss-disable: remote_execution_use_case = apple_test_re_use_case(macos_test = sdk_name == MacOSXSdkMetadata.name, use_m1_simulator = ctx.attrs.use_m1_simulator) + # @oss-disable: remote_execution_use_case = ctx.attrs.test_re_use_case or apple_test_re_use_case(macos_test = sdk_name == MacOSXSdkMetadata.name) remote_execution_use_case = None # @oss-enable local_enabled = remote_execution_use_case == None @@ -239,7 +272,7 @@ def _get_test_info(ctx: AnalysisContext, xctest_bundle: Artifact, test_host_app_ }, ) -def _get_test_host_app_bundle(ctx: AnalysisContext) -> [Artifact, None]: +def _get_test_host_app_bundle(ctx: AnalysisContext) -> Artifact | None: """ Get the bundle for the test host app, if one exists for this test. """ if ctx.attrs.test_host_app: # Copy the test host app bundle into test's output directory @@ -250,18 +283,29 @@ def _get_test_host_app_bundle(ctx: AnalysisContext) -> [Artifact, None]: return None -def _get_test_host_app_binary(ctx: AnalysisContext, test_host_app_bundle: [Artifact, None]) -> [cmd_args, None]: +def _get_test_host_app_binary(ctx: AnalysisContext, test_host_app_bundle: Artifact | None) -> [cmd_args, None]: """ Reference to the binary with the test host app bundle, if one exists for this test. Captures the bundle as an artifact in the cmd_args. """ if ctx.attrs.test_host_app == None: return None parts = [test_host_app_bundle] - rel_path = bundle_relative_path_for_destination(AppleBundleDestination("executables"), get_apple_sdk_name(ctx), ctx.attrs.extension) + rel_path = bundle_relative_path_for_destination(AppleBundleDestination("executables"), get_apple_sdk_name(ctx), ctx.attrs.extension, False) if len(rel_path) > 0: parts.append(rel_path) parts.append(ctx.attrs.test_host_app[AppleBundleInfo].binary_name) return cmd_args(parts, delimiter = "/") +def _get_ui_test_target_app_bundle(ctx: AnalysisContext) -> Artifact | None: + """ Get the bundle for the ui test target app, if one exists for this test. 
""" + if ctx.attrs.ui_test_target_app: + # Copy the ui test target app bundle into test's output directory + original_bundle = ctx.attrs.ui_test_target_app[AppleBundleInfo].bundle + ui_test_target_app_bundle = ctx.actions.declare_output(original_bundle.basename) + ctx.actions.copy_file(ui_test_target_app_bundle, original_bundle) + return ui_test_target_app_bundle + + return None + def _get_bundle_loader_flags(binary: [cmd_args, None]) -> list[typing.Any]: if binary: # During linking we need to link the test shared lib against the test host binary. The @@ -278,9 +322,14 @@ def _xcode_populate_attributes( test_host_app_binary: [cmd_args, None], **_kwargs) -> dict[str, typing.Any]: data = apple_populate_xcode_attributes(ctx = ctx, srcs = srcs, argsfiles = argsfiles, product_name = ctx.attrs.name) - data["output"] = xctest_bundle - if test_host_app_binary: - data["test_host_app_binary"] = test_host_app_binary + data[XcodeDataInfoKeys.OUTPUT] = xctest_bundle + if ctx.attrs.ui_test_target_app: + data[XcodeDataInfoKeys.TEST_TYPE] = "ui-test" + data[XcodeDataInfoKeys.TEST_TARGET] = ctx.attrs.ui_test_target_app.label.raw_target() + else: + data[XcodeDataInfoKeys.TEST_TYPE] = "unit-test" + if test_host_app_binary: + data[XcodeDataInfoKeys.TEST_HOST_APP_BINARY] = test_host_app_binary return data def _get_xctest_framework_search_paths(ctx: AnalysisContext) -> (cmd_args, cmd_args): @@ -306,28 +355,3 @@ def _get_xctest_framework_linker_flags(ctx: AnalysisContext) -> list[[cmd_args, "-F", xctest_framework_search_path, ] - -def _get_xctest_framework(ctx: AnalysisContext, swift_support_needed: bool) -> list[AppleBundlePart]: - swift_support = [ - _get_object_from_platform_path(ctx, "Developer/usr/lib/libXCTestSwiftSupport.dylib"), - ] if swift_support_needed else [] - return [ - _get_object_from_platform_path(ctx, "Developer/Library/Frameworks/XCTest.framework"), - _get_object_from_platform_path(ctx, "Developer/Library/PrivateFrameworks/XCTAutomationSupport.framework"), - _get_object_from_platform_path(ctx, "Developer/Library/PrivateFrameworks/XCTestCore.framework"), - _get_object_from_platform_path(ctx, "Developer/Library/PrivateFrameworks/XCTestSupport.framework"), - _get_object_from_platform_path(ctx, "Developer/Library/PrivateFrameworks/XCUIAutomation.framework"), - _get_object_from_platform_path(ctx, "Developer/Library/PrivateFrameworks/XCUnit.framework"), - _get_object_from_platform_path(ctx, "Developer/usr/lib/libXCTestBundleInject.dylib"), - ] + swift_support - -def _get_object_from_platform_path(ctx: AnalysisContext, platform_relative_path: str) -> AppleBundlePart: - toolchain = ctx.attrs._apple_toolchain[AppleToolchainInfo] - copied_framework = ctx.actions.declare_output(paths.basename(platform_relative_path)) - - # We have to copy because: - # 1) Platform path might be a string (e.g. for Xcode toolchains) - # 2) It's not possible to project artifact which is not produced by different target (and platform path is a separate target for distributed toolchains). 
- ctx.actions.run(["cp", "-PR", cmd_args(toolchain.platform_path, platform_relative_path, delimiter = "/"), copied_framework.as_output()], category = "extract_framework", identifier = platform_relative_path) - - return AppleBundlePart(source = copied_framework, destination = AppleBundleDestination("frameworks"), codesign_on_copy = True) diff --git a/prelude/apple/apple_test_host_app_transition.bzl b/prelude/apple/apple_test_host_app_transition.bzl new file mode 100644 index 00000000000..d239b018a31 --- /dev/null +++ b/prelude/apple/apple_test_host_app_transition.bzl @@ -0,0 +1,38 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +def _apple_test_host_app_transition_impl(platform: PlatformInfo, refs: struct, attrs: struct) -> PlatformInfo: + if not attrs.embed_xctest_frameworks_in_test_host_app: + return platform + + updated_constraints = dict(platform.configuration.constraints) + + test_host_marker_setting_label = refs.embed_xctest_frameworks_constraint_setting[ConstraintSettingInfo].label + if test_host_marker_setting_label in updated_constraints: + return platform + + test_host_marker_value_info = refs.embed_xctest_frameworks_marker_constraint_value[ConstraintValueInfo] + updated_constraints[test_host_marker_setting_label] = test_host_marker_value_info + + return PlatformInfo( + label = platform.label + "-test-host-app", + configuration = ConfigurationInfo( + constraints = updated_constraints, + values = platform.configuration.values, + ), + ) + +apple_test_host_app_transition = transition( + impl = _apple_test_host_app_transition_impl, + refs = { + "embed_xctest_frameworks_constraint_setting": "config//marker/apple/constraints:embed_xctest_frameworks", + "embed_xctest_frameworks_marker_constraint_value": "config//marker/apple/constraints:embed_xctest_frameworks_enabled", + }, + attrs = [ + "embed_xctest_frameworks_in_test_host_app", + ], +) diff --git a/prelude/apple/apple_toolchain.bzl b/prelude/apple/apple_toolchain.bzl index 3e0802db4a7..e5c236b3fcf 100644 --- a/prelude/apple/apple_toolchain.bzl +++ b/prelude/apple/apple_toolchain.bzl @@ -12,6 +12,7 @@ load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxPlatformInfo", "CxxToolchainIn def apple_toolchain_impl(ctx: AnalysisContext) -> list[Provider]: sdk_path = ctx.attrs._internal_sdk_path or ctx.attrs.sdk_path platform_path = ctx.attrs._internal_platform_path or ctx.attrs.platform_path + return [ DefaultInfo(), AppleToolchainInfo( @@ -31,17 +32,16 @@ def apple_toolchain_impl(ctx: AnalysisContext) -> list[Provider]: installer = ctx.attrs.installer, libtool = ctx.attrs.libtool[RunInfo], lipo = ctx.attrs.lipo[RunInfo], - min_version = ctx.attrs.min_version, + mapc = ctx.attrs.mapc[RunInfo] if ctx.attrs.mapc else None, + merge_index_store = ctx.attrs.merge_index_store[RunInfo], momc = ctx.attrs.momc[RunInfo], objdump = ctx.attrs.objdump[RunInfo] if ctx.attrs.objdump else None, - odrcov = ctx.attrs.odrcov[RunInfo] if ctx.attrs.odrcov else None, platform_path = platform_path, sdk_build_version = ctx.attrs.build_version, sdk_name = ctx.attrs.sdk_name, sdk_path = sdk_path, sdk_version = ctx.attrs.version, swift_toolchain_info = ctx.attrs.swift_toolchain[SwiftToolchainInfo] if ctx.attrs.swift_toolchain else None, - watch_kit_stub_binary = ctx.attrs.watch_kit_stub_binary, 
xcode_build_version = ctx.attrs.xcode_build_version, xcode_version = ctx.attrs.xcode_version, xctest = ctx.attrs.xctest[RunInfo], diff --git a/prelude/apple/apple_toolchain_types.bzl b/prelude/apple/apple_toolchain_types.bzl index d94c9676bb3..d4a75136fb6 100644 --- a/prelude/apple/apple_toolchain_types.bzl +++ b/prelude/apple/apple_toolchain_types.bzl @@ -5,56 +5,59 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +load("@prelude//apple/swift:swift_toolchain_types.bzl", "SwiftToolchainInfo") +load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxPlatformInfo", "CxxToolchainInfo") + AppleToolchainInfo = provider( # @unsorted-dict-items fields = { - "actool": provider_field(typing.Any, default = None), # "RunInfo" - "architecture": provider_field(typing.Any, default = None), # str - "codesign_allocate": provider_field(typing.Any, default = None), # "RunInfo" - "codesign_identities_command": provider_field(typing.Any, default = None), # ["RunInfo", None] - "codesign": provider_field(typing.Any, default = None), # "RunInfo" - "compile_resources_locally": provider_field(typing.Any, default = None), # bool - "copy_scene_kit_assets": provider_field(typing.Any, default = None), # "RunInfo" - "cxx_platform_info": provider_field(typing.Any, default = None), # "CxxPlatformInfo" - "cxx_toolchain_info": provider_field(typing.Any, default = None), # "CxxToolchainInfo" - "dsymutil": provider_field(typing.Any, default = None), # "RunInfo" - "dwarfdump": provider_field(typing.Any, default = None), # ["RunInfo", None] - "extra_linker_outputs": provider_field(typing.Any, default = None), # [str] - "ibtool": provider_field(typing.Any, default = None), # "RunInfo" - "installer": provider_field(typing.Any, default = None), # label - "libtool": provider_field(typing.Any, default = None), # "RunInfo" - "lipo": provider_field(typing.Any, default = None), # "RunInfo" - "min_version": provider_field(typing.Any, default = None), # [None, str] - "momc": provider_field(typing.Any, default = None), # "RunInfo" + "actool": provider_field(RunInfo), + "architecture": provider_field(str), + "codesign_allocate": provider_field(RunInfo), + "codesign_identities_command": provider_field(RunInfo | None, default = None), + "codesign": provider_field(RunInfo), + "compile_resources_locally": provider_field(bool), + "copy_scene_kit_assets": provider_field(RunInfo), + "cxx_platform_info": provider_field(CxxPlatformInfo), + "cxx_toolchain_info": provider_field(CxxToolchainInfo), + "dsymutil": provider_field(RunInfo), + "dwarfdump": provider_field(RunInfo | None, default = None), + "extra_linker_outputs": provider_field(list[str]), + "ibtool": provider_field(RunInfo), + "installer": provider_field(Label), + "libtool": provider_field(RunInfo), + "lipo": provider_field(RunInfo), + "mapc": provider_field(RunInfo | None, default = None), + "merge_index_store": provider_field(RunInfo), + "momc": provider_field(RunInfo), "objdump": provider_field(RunInfo | None, default = None), - "odrcov": provider_field(typing.Any, default = None), # ["RunInfo", None] - "platform_path": provider_field(typing.Any, default = None), # [str, artifact] - "sdk_build_version": provider_field(typing.Any, default = None), # "[None, str]" + "platform_path": provider_field(str | Artifact), + "sdk_build_version": provider_field(str | None, default = None), # SDK name to be passed to tools (e.g. actool), equivalent to ApplePlatform::getExternalName() in v1. 
- "sdk_name": provider_field(typing.Any, default = None), # str - "sdk_path": provider_field(typing.Any, default = None), # [str, artifact] + "sdk_name": provider_field(str), + "sdk_path": provider_field(str | Artifact), # TODO(T124581557) Make it non-optional once there is no "selected xcode" toolchain - "sdk_version": provider_field(typing.Any, default = None), # [None, str] - "swift_toolchain_info": provider_field(typing.Any, default = None), # "SwiftToolchainInfo" - "watch_kit_stub_binary": provider_field(typing.Any, default = None), # "artifact" - "xcode_build_version": provider_field(typing.Any, default = None), # "[None, str]" - "xcode_version": provider_field(typing.Any, default = None), # "[None, str]" - "xctest": provider_field(typing.Any, default = None), # "RunInfo" + "sdk_version": provider_field(str | None, default = None), + "swift_toolchain_info": provider_field(SwiftToolchainInfo), + "xcode_build_version": provider_field(str | None, default = None), + "xcode_version": provider_field(str | None, default = None), + "xctest": provider_field(RunInfo), }, ) AppleToolsInfo = provider( # @unsorted-dict-items fields = { - "assemble_bundle": provider_field(typing.Any, default = None), # RunInfo - "split_arch_combine_dsym_bundles_tool": provider_field(typing.Any, default = None), # RunInfo - "dry_codesign_tool": provider_field(typing.Any, default = None), # "RunInfo" - "adhoc_codesign_tool": provider_field(typing.Any, default = None), # "RunInfo" - "selective_debugging_scrubber": provider_field(typing.Any, default = None), # "RunInfo" - "info_plist_processor": provider_field(typing.Any, default = None), # RunInfo - "ipa_package_maker": provider_field(typing.Any, default = None), # RunInfo - "make_modulemap": provider_field(typing.Any, default = None), # "RunInfo" - "make_vfsoverlay": provider_field(typing.Any, default = None), # "RunInfo" - "swift_objc_header_postprocess": provider_field(typing.Any, default = None), # "RunInfo" + "assemble_bundle": provider_field(RunInfo), + "split_arch_combine_dsym_bundles_tool": provider_field(RunInfo), + "dry_codesign_tool": provider_field(RunInfo), + "adhoc_codesign_tool": provider_field(RunInfo), + "selective_debugging_scrubber": provider_field(RunInfo), + "info_plist_processor": provider_field(RunInfo), + "ipa_package_maker": provider_field(RunInfo), + "make_modulemap": provider_field(RunInfo), + "make_vfsoverlay": provider_field(RunInfo), + "framework_sanitizer": provider_field(RunInfo), + "xcframework_maker": provider_field(RunInfo), }, ) diff --git a/prelude/apple/apple_universal_binaries.bzl b/prelude/apple/apple_universal_binaries.bzl index f5c3fbe6ee8..d5c1a928676 100644 --- a/prelude/apple/apple_universal_binaries.bzl +++ b/prelude/apple/apple_universal_binaries.bzl @@ -11,20 +11,42 @@ load(":apple_bundle_types.bzl", "AppleBundleBinaryOutput") load(":apple_toolchain_types.bzl", "AppleToolsInfo") load(":debug.bzl", "AppleDebuggableInfo") -def create_universal_binary( +def get_universal_binary_name(ctx: AnalysisContext) -> str: + if ctx.attrs.executable_name: + return ctx.attrs.executable_name + binary_deps = ctx.attrs.executable + + # Because `binary_deps` is a split transition of the same target, + # the filenames would be identical, so we just pick the first one. 
+ first_binary_dep = binary_deps.values()[0] + first_binary_artifact = first_binary_dep[DefaultInfo].default_outputs[0] + + # The universal executable should have the same name as the base/thin ones + return first_binary_artifact.short_path + +def lipo_binaries( ctx: AnalysisContext, binary_deps: dict[str, Dependency], binary_name: [str, None], - dsym_bundle_name: [str, None], - split_arch_dsym: bool) -> AppleBundleBinaryOutput: + lipo: RunInfo) -> Artifact: binary_output = ctx.actions.declare_output("UniversalBinary" if binary_name == None else binary_name, dir = False) - lipo_cmd = cmd_args([ctx.attrs._apple_toolchain[AppleToolchainInfo].lipo]) + lipo_cmd = [lipo] for (_, binary) in binary_deps.items(): - lipo_cmd.add(cmd_args(binary[DefaultInfo].default_outputs[0])) + lipo_cmd.append(cmd_args(binary[DefaultInfo].default_outputs[0])) + + lipo_cmd.extend(["-create", "-output", binary_output.as_output()]) + ctx.actions.run(cmd_args(lipo_cmd), category = "lipo") - lipo_cmd.add(["-create", "-output", binary_output.as_output()]) - ctx.actions.run(lipo_cmd, category = "lipo") + return binary_output + +def create_universal_binary( + ctx: AnalysisContext, + binary_deps: dict[str, Dependency], + binary_name: [str, None], + dsym_bundle_name: [str, None], + split_arch_dsym: bool) -> AppleBundleBinaryOutput: + binary_output = lipo_binaries(ctx, binary_deps, binary_name, ctx.attrs._apple_toolchain[AppleToolchainInfo].lipo) # Universal binaries can be created out of plain `cxx_binary()` / `cxx_library()` # which lack the `AppleDebuggableInfo` provider. @@ -34,12 +56,12 @@ def create_universal_binary( dsym_output = None if split_arch_dsym and contains_full_debuggable_info: dsym_output = ctx.actions.declare_output("UniversalBinary.dSYM" if dsym_bundle_name == None else dsym_bundle_name, dir = True) - dsym_combine_cmd = cmd_args([ctx.attrs._apple_tools[AppleToolsInfo].split_arch_combine_dsym_bundles_tool]) + dsym_combine_cmd = [ctx.attrs._apple_tools[AppleToolsInfo].split_arch_combine_dsym_bundles_tool] for (arch, binary) in binary_deps.items(): - dsym_combine_cmd.add(["--dsym-bundle", cmd_args(binary.get(AppleDebuggableInfo).dsyms[0]), "--arch", arch]) - dsym_combine_cmd.add(["--output", dsym_output.as_output()]) - ctx.actions.run(dsym_combine_cmd, category = "universal_binaries_dsym") + dsym_combine_cmd.extend(["--dsym-bundle", cmd_args(binary.get(AppleDebuggableInfo).dsyms[0]), "--arch", arch]) + dsym_combine_cmd.extend(["--output", dsym_output.as_output()]) + ctx.actions.run(cmd_args(dsym_combine_cmd), category = "universal_binaries_dsym") all_debug_info_tsets = [] if contains_full_debuggable_info: diff --git a/prelude/apple/apple_universal_executable.bzl b/prelude/apple/apple_universal_executable.bzl index a8ca1e605d1..0e06bcae8f1 100644 --- a/prelude/apple/apple_universal_executable.bzl +++ b/prelude/apple/apple_universal_executable.bzl @@ -13,7 +13,7 @@ load(":apple_bundle_types.bzl", "AppleBundleLinkerMapInfo", "AppleMinDeploymentV load(":apple_bundle_utility.bzl", "get_default_binary_dep", "get_flattened_binary_deps", "merge_bundle_linker_maps_info") load(":apple_code_signing_types.bzl", "AppleEntitlementsInfo") load(":apple_dsym.bzl", "DSYM_SUBTARGET", "get_apple_dsym_ext") -load(":apple_universal_binaries.bzl", "create_universal_binary") +load(":apple_universal_binaries.bzl", "create_universal_binary", "get_universal_binary_name") load(":debug.bzl", "AppleDebuggableInfo", "DEBUGINFO_SUBTARGET") load(":resource_groups.bzl", "ResourceGraphInfo") @@ -28,25 +28,12 @@ _MERGED_PROVIDER_TYPES = [ 
AppleBundleLinkerMapInfo, ] -def _get_universal_binary_name(ctx: AnalysisContext) -> str: - if ctx.attrs.executable_name: - return ctx.attrs.executable_name - binary_deps = ctx.attrs.executable - - # Because `binary_deps` is a split transition of the same target, - # the filenames would be identical, so we just pick the first one. - first_binary_dep = binary_deps.values()[0] - first_binary_artifact = first_binary_dep[DefaultInfo].default_outputs[0] - - # The universal executable should have the same name as the base/thin ones - return first_binary_artifact.short_path - def apple_universal_executable_impl(ctx: AnalysisContext) -> list[Provider]: dsym_name = ctx.attrs.name + ".dSYM" binary_outputs = create_universal_binary( ctx = ctx, binary_deps = ctx.attrs.executable, - binary_name = _get_universal_binary_name(ctx), + binary_name = get_universal_binary_name(ctx), dsym_bundle_name = dsym_name, split_arch_dsym = ctx.attrs.split_arch_dsym, ) diff --git a/prelude/apple/apple_utility.bzl b/prelude/apple/apple_utility.bzl index 529793638d3..7e12625f70e 100644 --- a/prelude/apple/apple_utility.bzl +++ b/prelude/apple/apple_utility.bzl @@ -8,20 +8,6 @@ load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo") load("@prelude//cxx:headers.bzl", "CxxHeadersLayout", "CxxHeadersNaming") load("@prelude//utils:utils.bzl", "value_or") -load(":apple_target_sdk_version.bzl", "get_min_deployment_version_for_node") - -_VERSION_PLACEHOLDER = "(VERSION)" - -# TODO(T115177501): Make target triples part of the toolchains -# Map from SDK name -> target triple _without_ leading architecture -_TARGET_TRIPLE_MAP = { - "iphoneos": "apple-ios{}".format(_VERSION_PLACEHOLDER), - "iphonesimulator": "apple-ios{}-simulator".format(_VERSION_PLACEHOLDER), - "maccatalyst": "apple-ios{}-macabi".format(_VERSION_PLACEHOLDER), - "macosx": "apple-macosx{}".format(_VERSION_PLACEHOLDER), - "watchos": "apple-watchos{}".format(_VERSION_PLACEHOLDER), - "watchsimulator": "apple-watchos{}-simulator".format(_VERSION_PLACEHOLDER), -} def get_apple_cxx_headers_layout(ctx: AnalysisContext) -> CxxHeadersLayout: namespace = value_or(ctx.attrs.header_path_prefix, ctx.attrs.name) @@ -36,24 +22,6 @@ def has_apple_toolchain(ctx: AnalysisContext) -> bool: def get_apple_architecture(ctx: AnalysisContext) -> str: return ctx.attrs._apple_toolchain[AppleToolchainInfo].architecture -def get_versioned_target_triple(ctx: AnalysisContext) -> str: - apple_toolchain_info = ctx.attrs._apple_toolchain[AppleToolchainInfo] - swift_toolchain_info = apple_toolchain_info.swift_toolchain_info - - architecture = swift_toolchain_info.architecture - if architecture == None: - fail("Need to set `architecture` field of swift_toolchain(), target: {}".format(ctx.label)) - - target_sdk_version = get_min_deployment_version_for_node(ctx) or "" - - sdk_name = apple_toolchain_info.sdk_name - target_triple_with_version_placeholder = _TARGET_TRIPLE_MAP.get(sdk_name) - if target_triple_with_version_placeholder == None: - fail("Could not find target triple for sdk = {}".format(sdk_name)) - - versioned_target_triple = target_triple_with_version_placeholder.replace(_VERSION_PLACEHOLDER, target_sdk_version) - return "{}-{}".format(architecture, versioned_target_triple) - def get_apple_stripped_attr_value_with_default_fallback(ctx: AnalysisContext) -> bool: stripped = ctx.attrs.stripped if stripped != None: @@ -73,15 +41,15 @@ def expand_relative_prefixed_sdk_path( "$RESOURCEDIR": swift_resource_dir, "$SDKROOT": sdk_path, } - expanded_cmd = cmd_args() + expanded_cmd = [] 
for (path_variable, path_value) in path_expansion_map.items(): if path_to_expand.startswith(path_variable): path = path_to_expand[len(path_variable):] if path.find("$") == 0: fail("Failed to expand framework path: {}".format(path)) - expanded_cmd.add(cmd_args([path_value, path], delimiter = "")) + expanded_cmd.append(cmd_args([path_value, path], delimiter = "")) - return expanded_cmd + return cmd_args(expanded_cmd) def get_disable_pch_validation_flags() -> list[str]: """ diff --git a/prelude/apple/apple_xctest_frameworks_utility.bzl b/prelude/apple/apple_xctest_frameworks_utility.bzl new file mode 100644 index 00000000000..c33d59a9222 --- /dev/null +++ b/prelude/apple/apple_xctest_frameworks_utility.bzl @@ -0,0 +1,42 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load("@prelude//:paths.bzl", "paths") +load("@prelude//apple:apple_bundle_destination.bzl", "AppleBundleDestination") +load("@prelude//apple:apple_bundle_part.bzl", "AppleBundlePart") +load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo") + +def get_xctest_frameworks_bundle_parts(ctx: AnalysisContext, swift_support_needed: bool) -> list[AppleBundlePart]: + swift_support = [] + if swift_support_needed: + swift_support.append(_get_object_from_platform_path(ctx, "Developer/usr/lib/libXCTestSwiftSupport.dylib")) + + # T201426509: Xcode 16 introduces the Swift Testing framework + # that is a load dependency of libXCTestSwiftSupport.dylib + if int(ctx.attrs._apple_toolchain[AppleToolchainInfo].xcode_version[:2]) >= 16: + swift_support.append(_get_object_from_platform_path(ctx, "Developer/Library/Frameworks/Testing.framework")) + + return [ + _get_object_from_platform_path(ctx, "Developer/Library/Frameworks/XCTest.framework"), + _get_object_from_platform_path(ctx, "Developer/Library/PrivateFrameworks/XCTAutomationSupport.framework"), + _get_object_from_platform_path(ctx, "Developer/Library/PrivateFrameworks/XCTestCore.framework"), + _get_object_from_platform_path(ctx, "Developer/Library/PrivateFrameworks/XCTestSupport.framework"), + _get_object_from_platform_path(ctx, "Developer/Library/PrivateFrameworks/XCUIAutomation.framework"), + _get_object_from_platform_path(ctx, "Developer/Library/PrivateFrameworks/XCUnit.framework"), + _get_object_from_platform_path(ctx, "Developer/usr/lib/libXCTestBundleInject.dylib"), + ] + swift_support + +def _get_object_from_platform_path(ctx: AnalysisContext, platform_relative_path: str) -> AppleBundlePart: + toolchain = ctx.attrs._apple_toolchain[AppleToolchainInfo] + copied_framework = ctx.actions.declare_output(paths.basename(platform_relative_path)) + + # We have to copy because: + # 1) Platform path might be a string (e.g. for Xcode toolchains) + # 2) It's not possible to project an artifact that is not produced by a different target (and the platform path is a separate target for distributed toolchains).
+ ctx.actions.run(["cp", "-PR", cmd_args(toolchain.platform_path, platform_relative_path, delimiter = "/"), copied_framework.as_output()], category = "extract_framework", identifier = platform_relative_path) + + return AppleBundlePart(source = copied_framework, destination = AppleBundleDestination("frameworks"), codesign_on_copy = True) diff --git a/prelude/apple/apple_xcuitest.bzl b/prelude/apple/apple_xcuitest.bzl index 6aa5052ed69..d08d7264605 100644 --- a/prelude/apple/apple_xcuitest.bzl +++ b/prelude/apple/apple_xcuitest.bzl @@ -21,7 +21,7 @@ def apple_xcuitest_impl(ctx: AnalysisContext) -> [list[Provider], Promise]: _get_xctrunner_binary(ctx), _get_uitest_bundle(ctx), ] + _get_xctrunner_frameworks(ctx) - assemble_bundle( + bundle_result = assemble_bundle( ctx = ctx, bundle = output_bundle, info_plist_part = process_info_plist(ctx, override_input = None), @@ -39,7 +39,7 @@ def apple_xcuitest_impl(ctx: AnalysisContext) -> [list[Provider], Promise]: # The test runner binary does not contain Swift skip_copying_swift_stdlib = True, ), - ] + ] + bundle_result.providers def _get_uitest_bundle(ctx: AnalysisContext) -> AppleBundlePart: return AppleBundlePart( diff --git a/prelude/apple/cxx_universal_executable.bzl b/prelude/apple/cxx_universal_executable.bzl new file mode 100644 index 00000000000..c083f03f156 --- /dev/null +++ b/prelude/apple/cxx_universal_executable.bzl @@ -0,0 +1,26 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load("@prelude//apple:apple_universal_binaries.bzl", "get_universal_binary_name", "lipo_binaries") +load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo") + +def cxx_universal_executable_impl(ctx: AnalysisContext) -> list[Provider]: + lipo = ctx.attrs._cxx_toolchain[CxxToolchainInfo].lipo + if not lipo: + fail("`cxx_toolchain()` target does not have a valid `lipo` tool: {}".format(ctx.attrs._cxx_toolchain.label)) + + universal_binary = lipo_binaries( + ctx = ctx, + binary_deps = ctx.attrs.executable, + binary_name = get_universal_binary_name(ctx), + lipo = lipo, + ) + + return [ + DefaultInfo(default_output = universal_binary), + RunInfo(args = cmd_args(universal_binary)), + ] diff --git a/prelude/apple/debug.bzl b/prelude/apple/debug.bzl index 7b6b7239bd4..e7a91d1147a 100644 --- a/prelude/apple/debug.bzl +++ b/prelude/apple/debug.bzl @@ -26,13 +26,13 @@ AppleDebuggableInfo = provider( # a. the owning library target to artifacts, or # b. the owning bundle target to filtered artifacts "debug_info_tset": provider_field(ArtifactTSet), - # In the case of b above, contians the map of library target to artifacts, else None + # In the case of b above, contains the map of library target to artifacts, else None "filtered_map": provider_field([dict[Label, list[Artifact]], None], default = None), }, ) _AppleDebugInfo = record( - debug_info_tset = "ArtifactTSet", + debug_info_tset = ArtifactTSet, filtered_map = field([dict[Label, list[Artifact]], None]), ) diff --git a/prelude/apple/mockingbird/mockingbird_mock.bzl b/prelude/apple/mockingbird/mockingbird_mock.bzl new file mode 100644 index 00000000000..96690b9d216 --- /dev/null +++ b/prelude/apple/mockingbird/mockingbird_mock.bzl @@ -0,0 +1,184 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. 
+# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load("@prelude//apple:apple_platforms.bzl", "APPLE_PLATFORMS_KEY") +load("@prelude//user:rule_spec.bzl", "RuleRegistrationSpec") +load(":mockingbird_types.bzl", "MockingbirdLibraryInfo", "MockingbirdLibraryRecord", "MockingbirdSourcesInfo") + +def _impl(ctx: AnalysisContext) -> list[Provider]: + mockingbird_info = ctx.attrs.module[MockingbirdLibraryInfo] + + dep_names = [dep[MockingbirdLibraryInfo].name for dep in ctx.attrs.deps] + included_srcs = [src.basename for src in ctx.attrs.srcs] + excluded_srcs = [src.basename for src in ctx.attrs.excluded_srcs] + + for src_name in included_srcs: + if not src_name.endswith(".swift"): + fail("srcs should only specify Swift files. Other source files, such as {}, do not need to be included.".format(src_name)) + + for src_name in excluded_srcs: + if not src_name.endswith(".swift"): + fail("excluded_srcs should only specify Swift files. Other source files, such as {}, do not need to be included.".format(src_name)) + + (json_project_description, src_dirs) = _get_mockingbird_json_project_description(info = mockingbird_info, included_srcs = included_srcs, excluded_srcs = excluded_srcs, dep_names = dep_names) + json_project_description_output = ctx.actions.declare_output("mockingbird_project.json") + ctx.actions.write_json(json_project_description_output.as_output(), json_project_description) + + mockingbird_source = ctx.actions.declare_output(mockingbird_info.name + "Mocks.generated.swift", dir = False) + cmd = cmd_args( + hidden = src_dirs, + ) + + params = [ + ctx.attrs._mockingbird_bin[RunInfo], + "generate", + "--target", + mockingbird_info.name, + "--project", + json_project_description_output, + "--output", + mockingbird_source.as_output(), + "--header", + "// (c) Meta Platforms, Inc. and affiliates. Confidential and proprietary.", + "--support", + ctx.attrs._mockingbird_support[DefaultInfo].default_outputs, + "--verbose", + "--disable-cache", + ] + + if ctx.attrs.only_protocols: + params.append("--only-protocols") + + cmd.add(params) + + ctx.actions.run( + cmd, + category = "mockingbird", + local_only = True, # Mockingbird creates sockets for interprocess communication, which is deliberately blocked on RE. + weight_percentage = 100, + allow_cache_upload = True, + ) + + return [ + DefaultInfo(mockingbird_source), + MockingbirdSourcesInfo(srcs = [mockingbird_source]), + ] + +def _attrs(): + attribs = { + ## If the superclass for an object being mocked is in another module add it as a dep so mockingbird can find the implementation. + "deps": attrs.list(attrs.dep(), default = []), + ## The list of source files to exclude. Only the name of the file, excluding the path, should be set. If set, the srcs attribute will be ignored. + "excluded_srcs": attrs.set(attrs.source(), sorted = True, default = []), + ## The module to generate mocks for. + "module": attrs.dep(), + ## Whether to only generate mocks for Swift protocols. + "only_protocols": attrs.bool(default = False), + ## A list of source files to include. Only the name of the file, excluding the path, should be set. By default all source files are included and this doesn't need to be specified. 
+ "srcs": attrs.set(attrs.source(), sorted = True, default = []), + "_mockingbird_bin": attrs.exec_dep(providers = [RunInfo], default = "fbsource//fbobjc/VendorLib/Mockingbird:mockingbird-binary"), + "_mockingbird_support": attrs.dep(providers = [DefaultInfo], default = "fbsource//fbobjc/VendorLib/Mockingbird:MockingbirdSupport"), + APPLE_PLATFORMS_KEY: attrs.dict(key = attrs.string(), value = attrs.dep(), sorted = False, default = {}), + } + return attribs + +registration_spec = RuleRegistrationSpec( + name = "mockingbird_mock", + impl = _impl, + attrs = _attrs(), +) + +# Produce JSON project description for Mockingbird codegen +# https://mockingbirdswift.com/json-project-description +# { +# "targets": [ +# { +# "name": "MyLibrary", +# "type": "library", +# "path": "/path/to/MyLibrary", +# "dependencies": [], +# "sources": [ +# "SourceFileA.swift", +# "SourceFileB.swift" +# ] +# }, +# { +# "name": "MyOtherLibrary", +# "type": "library", +# "path": "/path/to/MyOtherLibrary", +# "dependencies": [ +# "MyLibrary" +# ], +# "sources": [ +# "SourceFileA.swift", +# "SourceFileB.swift" +# ] +# }, +# { +# "name": "MyLibraryTests", +# "type": "test", +# "path": "/path/to/MyLibraryTests", +# "dependencies": [ +# "MyLibrary" +# ], +# "sources": [ +# "SourceFileA.swift", +# "SourceFileB.swift" +# ] +# } +# ] +# } +def _get_mockingbird_json_project_description(info: MockingbirdLibraryInfo, included_srcs: list[str], excluded_srcs: list[str], dep_names: list[str]) -> (dict, list): + targets = [] + src_dirs = [] + for record in info.tset.traverse(): + if record.name == info.name: + targets.append(_target_dict_for_mockingbird_record(record = record, included_srcs = included_srcs, excluded_srcs = excluded_srcs, include_non_exported_deps = True)) + src_dirs.append(record.src_dir) + elif record.name in dep_names: + targets.append(_target_dict_for_mockingbird_record(record = record, included_srcs = [], excluded_srcs = [], include_non_exported_deps = False)) + src_dirs.append(record.src_dir) + json = { + "targets": targets, + } + + return (json, src_dirs) + +def _target_dict_for_mockingbird_record(record: MockingbirdLibraryRecord, included_srcs: list[str], excluded_srcs: list[str], include_non_exported_deps: bool) -> dict: + srcs = [] + if len(included_srcs) > 0 and len(excluded_srcs) > 0: + fail("Included srcs and excluded srcs cannot both be set at the same time") + + record_src_names = [src.basename for src in record.srcs] + + for specified_src in included_srcs + excluded_srcs: + if specified_src not in record_src_names: + fail("The source file {} does not exist in target {}".format(specified_src, record.name)) + + if len(included_srcs) > 0: + for src_name in record_src_names: + if src_name in included_srcs: + srcs.append(src_name) + elif len(excluded_srcs) > 0: + for src_name in record_src_names: + if src_name not in excluded_srcs: + srcs.append(src_name) + else: + srcs = record_src_names + + deps = record.exported_dep_names + + if include_non_exported_deps: + deps = deps + record.dep_names + + return { + "dependencies": deps, + "name": record.name, + "path": record.src_dir, + "sources": srcs, + "type": record.type, + } diff --git a/prelude/apple/mockingbird/mockingbird_types.bzl b/prelude/apple/mockingbird/mockingbird_types.bzl new file mode 100644 index 00000000000..0eee9cb67f1 --- /dev/null +++ b/prelude/apple/mockingbird/mockingbird_types.bzl @@ -0,0 +1,42 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. 
+# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +MockingbirdLibraryInfoTSet = transitive_set() + +MockingbirdTargetType = enum("library", "test") + +MockingbirdLibraryInfo = provider( + fields = { + # The name of the target. + "name": provider_field(str), + # Contains a tset with this target's MockingbirdLibraryRecord as the value + # and all of its dependency's MockingbirdLibraryRecord in the children. + "tset": provider_field(MockingbirdLibraryInfoTSet), + }, +) + +MockingbirdLibraryRecord = record( + # The names of this target's dependencies. + dep_names = field(list[str]), + # The names of this target's exported dependencies. + exported_dep_names = field(list[str]), + # The name of the target. + name = str, + # Swift sources in this target. + srcs = field(list[Artifact]), + # Whether this is a library or a test. + type = field(MockingbirdTargetType), + # Symlinked directory containing the source files. + src_dir = field(Artifact), +) + +MockingbirdSourcesInfo = provider( + fields = { + # Source files containing the auto generated mocks produced by mockingbird-cli. + "srcs": provider_field(list[Artifact]), + }, +) diff --git a/prelude/apple/modulemap.bzl b/prelude/apple/modulemap.bzl index 7cba3e9eb9c..7cfb0b7eb71 100644 --- a/prelude/apple/modulemap.bzl +++ b/prelude/apple/modulemap.bzl @@ -17,7 +17,7 @@ load( ) load(":apple_utility.bzl", "get_module_name") -def preprocessor_info_for_modulemap(ctx: AnalysisContext, name: str, headers: list[CHeader], swift_header: [Artifact, None]) -> CPreprocessor: +def preprocessor_info_for_modulemap(ctx: AnalysisContext, name: str, headers: list[CHeader], swift_header: Artifact | None) -> CPreprocessor: # We don't want to name this module.modulemap to avoid implicit importing if name == "module": fail("Don't use the name `module` for modulemaps, this will allow for implicit importing.") @@ -69,20 +69,20 @@ def preprocessor_info_for_modulemap(ctx: AnalysisContext, name: str, headers: li ctx.actions.run(cmd, category = "modulemap", identifier = name) return CPreprocessor( - relative_args = CPreprocessorArgs(args = _exported_preprocessor_args(symlink_tree)), - absolute_args = CPreprocessorArgs(args = _exported_preprocessor_args(symlink_tree)), + args = CPreprocessorArgs(args = _exported_preprocessor_args(symlink_tree)), modular_args = _args_for_modulemap(output, symlink_tree, swift_header), - modulemap_path = cmd_args(output).hidden(cmd_args(symlink_tree)), + modulemap_path = cmd_args(output, hidden = cmd_args(symlink_tree)), ) def _args_for_modulemap( modulemap: Artifact, symlink_tree: Artifact, - swift_header: [Artifact, None]) -> list[cmd_args]: - cmd = cmd_args(modulemap, format = "-fmodule-map-file={}") - cmd.hidden(symlink_tree) - if swift_header: - cmd.hidden(swift_header) + swift_header: Artifact | None) -> list[cmd_args]: + cmd = cmd_args( + modulemap, + format = "-fmodule-map-file={}", + hidden = [symlink_tree] + ([swift_header] if swift_header else []), + ) return [cmd] diff --git a/prelude/apple/prebuilt_apple_framework.bzl b/prelude/apple/prebuilt_apple_framework.bzl index e717eb40b30..bf51084d79c 100644 --- a/prelude/apple/prebuilt_apple_framework.bzl +++ b/prelude/apple/prebuilt_apple_framework.bzl @@ -5,10 +5,12 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
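# The modulemap.bzl hunk above is one instance of a migration that recurs
# throughout this diff: hidden inputs move from the fluent `.hidden()` call to
# the `hidden =` constructor argument. In miniature (names taken from that hunk):
#
#     # Before:
#     cmd = cmd_args(modulemap, format = "-fmodule-map-file={}")
#     cmd.hidden(symlink_tree)
#
#     # After:
#     cmd = cmd_args(modulemap, format = "-fmodule-map-file={}", hidden = [symlink_tree])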
+load("@prelude//:artifact_tset.bzl", "ArtifactTSet") load("@prelude//cxx:cxx_context.bzl", "get_cxx_toolchain_info") load( "@prelude//cxx:cxx_library_utility.bzl", "cxx_attr_exported_linker_flags", + "cxx_attr_preferred_linkage", "cxx_platform_supported", ) load( @@ -27,7 +29,6 @@ load( "LibOutputStyle", "LinkInfo", "LinkInfos", - "Linkage", "create_merged_link_info", ) load( @@ -41,9 +42,14 @@ load( "SharedLibraryInfo", "merge_shared_libraries", ) +load("@prelude//linking:strip.bzl", "strip_object") load("@prelude//utils:utils.bzl", "filter_and_map_idx") load(":apple_bundle_types.bzl", "AppleBundleInfo", "AppleBundleTypeDefault") +load(":apple_dsym.bzl", "DSYM_SUBTARGET") load(":apple_frameworks.bzl", "to_framework_name") +load(":apple_toolchain_types.bzl", "AppleToolchainInfo", "AppleToolsInfo") +load(":apple_utility.bzl", "get_apple_stripped_attr_value_with_default_fallback") +load(":debug.bzl", "AppleDebuggableInfo") def prebuilt_apple_framework_impl(ctx: AnalysisContext) -> list[Provider]: providers = [] @@ -63,7 +69,7 @@ def prebuilt_apple_framework_impl(ctx: AnalysisContext) -> list[Provider]: inherited_pp_info = cxx_inherited_preprocessor_infos(ctx.attrs.deps) providers.append(cxx_merge_cpreprocessors( ctx, - [CPreprocessor(relative_args = CPreprocessorArgs(args = ["-F", framework_dir]))], + [CPreprocessor(args = CPreprocessorArgs(args = ["-F", framework_dir]))], inherited_pp_info, )) @@ -78,10 +84,12 @@ def prebuilt_apple_framework_impl(ctx: AnalysisContext) -> list[Provider]: name = framework_name, pre_flags = args, ) + link_info = LinkInfos(default = link) + providers.append(create_merged_link_info( ctx, get_cxx_toolchain_info(ctx).pic_behavior, - {output_style: LinkInfos(default = link) for output_style in LibOutputStyle}, + {output_style: link_info for output_style in LibOutputStyle}, )) # Create, augment and provide the linkable graph. @@ -91,8 +99,8 @@ def prebuilt_apple_framework_impl(ctx: AnalysisContext) -> list[Provider]: ctx, linkable_node = create_linkable_node( ctx, - preferred_linkage = Linkage("shared"), - link_infos = {LibOutputStyle("shared_lib"): LinkInfos(default = link)}, + preferred_linkage = cxx_attr_preferred_linkage(ctx), + link_infos = {output_style: link_info for output_style in LibOutputStyle}, # TODO(cjhopman): this should be set to non-None default_soname = None, ), @@ -101,15 +109,52 @@ def prebuilt_apple_framework_impl(ctx: AnalysisContext) -> list[Provider]: ) providers.append(linkable_graph) + providers.append(merge_link_group_lib_info(deps = ctx.attrs.deps)) + providers.append(merge_shared_libraries(ctx.actions, deps = filter_and_map_idx(SharedLibraryInfo, ctx.attrs.deps))) + # The default output is the provided framework. 
- providers.append(DefaultInfo(default_output = framework_directory_artifact)) + sub_targets = { + "distribution": _sanitize_framework_for_app_distribution(ctx, framework_directory_artifact) + providers, + } + + if ctx.attrs.dsyms: + sub_targets[DSYM_SUBTARGET] = [DefaultInfo(default_outputs = ctx.attrs.dsyms)] + providers.append(AppleDebuggableInfo(dsyms = ctx.attrs.dsyms, debug_info_tset = ArtifactTSet())) + + providers.append(DefaultInfo(default_output = framework_directory_artifact, sub_targets = sub_targets)) providers.append(AppleBundleInfo( bundle = framework_directory_artifact, bundle_type = AppleBundleTypeDefault, skip_copying_swift_stdlib = True, contains_watchapp = None, )) - providers.append(merge_link_group_lib_info(deps = ctx.attrs.deps)) - providers.append(merge_shared_libraries(ctx.actions, deps = filter_and_map_idx(SharedLibraryInfo, ctx.attrs.deps))) return providers + +def _sanitize_framework_for_app_distribution(ctx: AnalysisContext, framework_directory_artifact: Artifact) -> list[Provider]: + framework_name = to_framework_name(framework_directory_artifact.basename) + bundle_for_app_distribution = ctx.actions.declare_output(framework_name + ".framework", dir = True) + + apple_tools = ctx.attrs._apple_tools[AppleToolsInfo] + framework_sanitize_command = cmd_args([ + apple_tools.framework_sanitizer, + "--input", + framework_directory_artifact, + "--output", + bundle_for_app_distribution.as_output(), + ]) + + if get_apple_stripped_attr_value_with_default_fallback(ctx): + strip_args = cmd_args("-x") + stripped = strip_object(ctx, ctx.attrs._apple_toolchain[AppleToolchainInfo].cxx_toolchain_info, framework_directory_artifact.project(framework_name), strip_args, "framework_distribution") + framework_sanitize_command.add("--replacement-binary", stripped) + + ctx.actions.run(framework_sanitize_command, category = "sanitize_prebuilt_apple_framework") + providers = [DefaultInfo(default_output = bundle_for_app_distribution)] + providers.append(AppleBundleInfo( + bundle = bundle_for_app_distribution, + bundle_type = AppleBundleTypeDefault, + skip_copying_swift_stdlib = True, + contains_watchapp = None, + )) + return providers diff --git a/prelude/apple/resource_groups.bzl b/prelude/apple/resource_groups.bzl index 8b5f63ba2da..abc9a49238a 100644 --- a/prelude/apple/resource_groups.bzl +++ b/prelude/apple/resource_groups.bzl @@ -5,14 +5,10 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -load( - "@prelude//cxx:groups.bzl", - "Group", - "MATCH_ALL_LABEL", -) +load("@prelude//cxx:groups_types.bzl", "Group", "MATCH_ALL_LABEL") load( "@prelude//utils:graph_utils.bzl", - "breadth_first_traversal_by", + "depth_first_traversal_by", ) load(":apple_asset_catalog_types.bzl", "AppleAssetCatalogSpec") load(":apple_core_data_types.bzl", "AppleCoreDataSpec") @@ -31,10 +27,12 @@ ResourceGroupInfo = provider( # NOTE(agallagher): We do this to maintain existing behavior w/ the # standalone `resource_group_map()` rule, but it's not clear if it's # actually desirable behavior. - "implicit_deps": provider_field(list[Dependency]), + "resource_group_to_implicit_deps_mapping": provider_field(dict[str, list[Dependency]]), }, ) +RESOURCE_GROUP_MAP_ATTR = attrs.option(attrs.dep(providers = [ResourceGroupInfo]), default = None) + ResourceGraphNode = record( label = field(Label), # Attribute labels on the target. 
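For orientation, the `distribution` subtarget added above reduces to one sanitizer invocation, optionally preceded by a strip step. A sketch of the argv `_sanitize_framework_for_app_distribution` assembles, with hypothetical filesystem paths standing in for the declared Buck artifacts (the real rule uses `apple_tools.framework_sanitizer` and `strip_object` on the framework binary):

```python
# Sketch only: in the rule these are artifacts and cmd_args, not plain strings.
def sanitize_command(framework_dir, out_dir, stripped_binary=None):
    cmd = [
        "framework_sanitizer",          # apple_tools.framework_sanitizer in the rule
        "--input", framework_dir,
        "--output", out_dir,
    ]
    if stripped_binary is not None:
        # Added only when the stripped attr resolves to True: the framework
        # binary is first stripped (`strip -x`) and then swapped in.
        cmd += ["--replacement-binary", stripped_binary]
    return cmd

print(sanitize_command("MyKit.framework", "out/MyKit.framework", "out/MyKit.stripped"))
```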
@@ -160,7 +158,7 @@ def get_filtered_resources( node = resource_graph_node_map[target] # buildifier: disable=uninitialized return node.exported_deps + node.deps - targets = breadth_first_traversal_by( + targets = depth_first_traversal_by( resource_graph_node_map, get_traversed_deps(root), get_traversed_deps, diff --git a/prelude/apple/scene_kit_assets.bzl b/prelude/apple/scene_kit_assets.bzl index 650919b3399..96268c8fb61 100644 --- a/prelude/apple/scene_kit_assets.bzl +++ b/prelude/apple/scene_kit_assets.bzl @@ -24,7 +24,7 @@ def scene_kit_assets_impl(ctx: AnalysisContext) -> list[Provider]: ) return [DefaultInfo(), graph] -def compile_scene_kit_assets(ctx: AnalysisContext, specs: list[SceneKitAssetsSpec]) -> [Artifact, None]: +def compile_scene_kit_assets(ctx: AnalysisContext, specs: list[SceneKitAssetsSpec]) -> Artifact | None: if len(specs) == 0: return None @@ -50,9 +50,15 @@ def compile_scene_kit_assets(ctx: AnalysisContext, specs: list[SceneKitAssetsSpe ], allow_args = True, ) - combined_command = cmd_args(["/bin/sh", wrapper_script]).hidden(copy_scene_kit_assets_cmds + [output.as_output()]) + combined_command = cmd_args(["/bin/sh", wrapper_script], hidden = copy_scene_kit_assets_cmds + [output.as_output()]) processing_options = get_bundle_resource_processing_options(ctx) - ctx.actions.run(combined_command, prefer_local = processing_options.prefer_local, allow_cache_upload = processing_options.allow_cache_upload, category = "scene_kit_assets") + ctx.actions.run( + combined_command, + prefer_local = processing_options.prefer_local, + prefer_remote = processing_options.prefer_remote, + allow_cache_upload = processing_options.allow_cache_upload, + category = "scene_kit_assets", + ) return output def _get_copy_scene_kit_assets_cmd(ctx: AnalysisContext, scene_kit_assets_spec: SceneKitAssetsSpec) -> cmd_args: diff --git a/prelude/apple/swift/apple_sdk_clang_module.bzl b/prelude/apple/swift/apple_sdk_clang_module.bzl index f861940873d..4ef7ab889ea 100644 --- a/prelude/apple/swift/apple_sdk_clang_module.bzl +++ b/prelude/apple/swift/apple_sdk_clang_module.bzl @@ -21,6 +21,7 @@ def apple_sdk_clang_module_impl(ctx: AnalysisContext) -> list[Provider]: partial_cmd = cmd, input_relative_path = ctx.attrs.modulemap_relative_path, deps = ctx.attrs.deps, + cxx_deps = ctx.attrs.cxx_deps, ) return [ @@ -32,6 +33,7 @@ def apple_sdk_clang_module_impl(ctx: AnalysisContext) -> list[Provider]: apple_sdk_clang_module = rule( impl = apple_sdk_clang_module_impl, attrs = { + "cxx_deps": attrs.list(attrs.dep(), default = []), "deps": attrs.list(attrs.dep(), default = []), "is_framework": attrs.bool(default = False), # This is a real module name, contrary to `name` diff --git a/prelude/apple/swift/apple_sdk_swift_module.bzl b/prelude/apple/swift/apple_sdk_swift_module.bzl index c734c0e456d..928ba5dfe58 100644 --- a/prelude/apple/swift/apple_sdk_swift_module.bzl +++ b/prelude/apple/swift/apple_sdk_swift_module.bzl @@ -37,6 +37,7 @@ def apple_sdk_swift_module_impl(ctx: AnalysisContext) -> list[Provider]: overlays = [SdkSwiftOverlayInfo(overlays = ctx.attrs.overlays)] module_info = SdkUncompiledModuleInfo( + cxx_deps = ctx.attrs.cxx_deps, deps = ctx.attrs.deps, input_relative_path = ctx.attrs.swiftinterface_relative_path, is_framework = ctx.attrs.is_framework, @@ -55,6 +56,7 @@ def apple_sdk_swift_module_impl(ctx: AnalysisContext) -> list[Provider]: apple_sdk_swift_module = rule( impl = apple_sdk_swift_module_impl, attrs = { + "cxx_deps": attrs.list(attrs.dep(), default = []), "deps": attrs.list(attrs.dep(), 
default = []), "is_framework": attrs.bool(default = False), # This is a real module name, contrary to `name` diff --git a/prelude/apple/swift/swift_compilation.bzl b/prelude/apple/swift/swift_compilation.bzl index 75213702cd7..f44873ff11a 100644 --- a/prelude/apple/swift/swift_compilation.bzl +++ b/prelude/apple/swift/swift_compilation.bzl @@ -7,20 +7,25 @@ load( "@prelude//:artifact_tset.bzl", + "ArtifactInfoTag", "ArtifactTSet", # @unused Used as a type "make_artifact_tset", "project_artifacts", ) load("@prelude//:paths.bzl", "paths") -load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo", "AppleToolsInfo") -load("@prelude//apple:apple_utility.bzl", "get_disable_pch_validation_flags", "get_module_name", "get_versioned_target_triple") +load("@prelude//apple:apple_error_handler.bzl", "apple_build_error_handler") +load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo") +load("@prelude//apple:apple_utility.bzl", "get_disable_pch_validation_flags", "get_module_name") load("@prelude//apple:modulemap.bzl", "preprocessor_info_for_modulemap") load("@prelude//apple/swift:swift_types.bzl", "SWIFTMODULE_EXTENSION", "SWIFT_EXTENSION") load("@prelude//cxx:argsfiles.bzl", "CompileArgsfile", "CompileArgsfiles") +load("@prelude//cxx:cxx_context.bzl", "get_cxx_toolchain_info") +load("@prelude//cxx:cxx_library_utility.bzl", "cxx_use_shlib_intfs_mode") load( - "@prelude//cxx:compile.bzl", + "@prelude//cxx:cxx_sources.bzl", "CxxSrcWithFlags", # @unused Used as a type ) +load("@prelude//cxx:cxx_toolchain_types.bzl", "ShlibInterfacesMode") load("@prelude//cxx:headers.bzl", "CHeader") load( "@prelude//cxx:link_groups.bzl", @@ -34,6 +39,7 @@ load( "cxx_inherited_preprocessor_infos", "cxx_merge_cpreprocessors", ) +load("@prelude//cxx:target_sdk_version.bzl", "get_target_triple") load( "@prelude//linking:link_info.bzl", "LinkInfo", # @unused Used as a type @@ -49,10 +55,9 @@ load( load( ":swift_incremental_support.bzl", "get_incremental_object_compilation_flags", - "get_incremental_swiftmodule_compilation_flags", "should_build_swift_incrementally", ) -load(":swift_module_map.bzl", "write_swift_module_map_with_swift_deps") +load(":swift_module_map.bzl", "write_swift_module_map_with_deps") load(":swift_pcm_compilation.bzl", "compile_underlying_pcm", "get_compiled_pcm_deps_tset", "get_swift_pcm_anon_targets") load( ":swift_pcm_compilation_types.bzl", @@ -68,12 +73,8 @@ load( "SwiftToolchainInfo", ) -# {"module_name": [exported_headers]}, used for Swift header post processing -ExportedHeadersTSet = transitive_set() - SwiftDependencyInfo = provider(fields = { "debug_info_tset": provider_field(ArtifactTSet), - "exported_headers": provider_field(ExportedHeadersTSet), # Includes modules through exported_deps, used for compilation "exported_swiftmodules": provider_field(SwiftCompiledModuleTset), }) @@ -86,7 +87,14 @@ SwiftCompilationDatabase = record( SwiftObjectOutput = record( object_files = field(list[Artifact]), argsfiles = field(CompileArgsfiles), - output_map_artifact = field([Artifact, None]), + output_map_artifact = field(Artifact | None), + swiftdeps = field(list[Artifact]), +) + +SwiftLibraryForDistributionOutput = record( + swiftinterface = field(Artifact), + private_swiftinterface = field(Artifact), + swiftdoc = field(Artifact), ) SwiftCompilationOutput = record( @@ -102,6 +110,8 @@ SwiftCompilationOutput = record( pre = field(CPreprocessor), # Exported preprocessor info required for ObjC compilation of rdeps. 
 exported_pre = field(CPreprocessor),
+    # Exported -Swift.h header
+    exported_swift_header = field(Artifact),
     # Argsfiles used to compile object files.
     argsfiles = field(CompileArgsfiles),
     # A tset of (SDK/first-party) swiftmodule artifacts required to be linked into binary.
@@ -112,7 +122,15 @@
     # Info required for `[swift-compilation-database]` subtarget.
     compilation_database = field(SwiftCompilationDatabase),
     # An artifact that represents the Swift module map for this target.
-    output_map_artifact = field([Artifact, None]),
+    output_map_artifact = field(Artifact | None),
+    # An optional artifact of the exported symbols emitted for this module.
+    exported_symbols = field(Artifact | None),
+    # An optional artifact with files that support consuming the generated library with later versions of the swift compiler.
+    swift_library_for_distribution_output = field(SwiftLibraryForDistributionOutput | None),
+    # A list of artifacts that store the index data.
+    index_stores = field(list[Artifact]),
+    # A list of artifacts of the swiftdeps files produced during incremental compilation.
+    swiftdeps = field(list[Artifact]),
 )
 
 SwiftDebugInfo = record(
@@ -120,7 +138,9 @@
     shared = list[ArtifactTSet],
 )
 
-REQUIRED_SDK_MODULES = ["Swift", "SwiftOnoneSupport", "Darwin", "_Concurrency", "_StringProcessing"]
+_REQUIRED_SDK_MODULES = ["Swift", "SwiftOnoneSupport", "Darwin", "_Concurrency", "_StringProcessing"]
+
+_REQUIRED_SDK_CXX_MODULES = _REQUIRED_SDK_MODULES + ["std"]
 
 def get_swift_anonymous_targets(ctx: AnalysisContext, get_apple_library_providers: typing.Callable) -> Promise:
     swift_cxx_flags = get_swift_cxx_flags(ctx)
@@ -129,7 +149,7 @@ def get_swift_anonymous_targets(ctx: AnalysisContext, get_apple_library_provider
     # all transitive deps will be compiled recursively.
     direct_uncompiled_sdk_deps = get_uncompiled_sdk_deps(
         ctx.attrs.sdk_modules,
-        REQUIRED_SDK_MODULES,
+        _REQUIRED_SDK_CXX_MODULES if ctx.attrs.enable_cxx_interop else _REQUIRED_SDK_MODULES,
         ctx.attrs._apple_toolchain[AppleToolchainInfo].swift_toolchain_info,
     )
 
@@ -145,6 +165,7 @@ def get_swift_anonymous_targets(ctx: AnalysisContext, get_apple_library_provider
     # passing apple_library's cxx flags through that must be used for all downward PCM compilations.
     sdk_pcm_targets = get_swift_sdk_pcm_anon_targets(
         ctx,
+        ctx.attrs.enable_cxx_interop,
         direct_uncompiled_sdk_deps,
         swift_cxx_flags,
     )
@@ -157,16 +178,13 @@ def get_swift_anonymous_targets(ctx: AnalysisContext, get_apple_library_provider
     )
     return ctx.actions.anon_targets(pcm_targets + sdk_pcm_targets + swift_interface_anon_targets).promise.map(get_apple_library_providers)
 
-def _get_explicit_modules_forwards_warnings_as_errors() -> bool:
-    return read_root_config("swift", "explicit_modules_forwards_warnings_as_errors", "false").lower() == "true"
-
 def get_swift_cxx_flags(ctx: AnalysisContext) -> list[str]:
     """Iterates through `swift_compiler_flags` and returns a list of flags that might affect Clang compilation"""
     gather, next = ([], False)
 
     # Each target needs to propagate the compiler's target triple.
     # This can vary depending on the deployment target of each library.
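On the `-target` change that follows: `get_versioned_target_triple` is replaced by `get_target_triple` from `target_sdk_version.bzl`, and the propagated value is a standard Apple target triple. An illustrative sketch; the architectures and version numbers here are assumed for the example, not taken from the diff:

```python
# Illustrative Apple target triples of the kind passed via `-target`.
# The real value is derived from the configured arch, SDK, and
# deployment version (all assumed below).
def make_triple(arch, os_name, version):
    return "{}-apple-{}{}".format(arch, os_name, version)

assert make_triple("arm64", "ios", "16.0") == "arm64-apple-ios16.0"
assert make_triple("x86_64", "macosx", "13.0") == "x86_64-apple-macosx13.0"
```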
- gather += ["-target", get_versioned_target_triple(ctx)] + gather += ["-target", get_target_triple(ctx)] for f in ctx.attrs.swift_compiler_flags: if next: @@ -175,11 +193,13 @@ def get_swift_cxx_flags(ctx: AnalysisContext) -> list[str]: next = False elif str(f) == "\"-Xcc\"": next = True - elif _get_explicit_modules_forwards_warnings_as_errors() and str(f) == "\"-warnings-as-errors\"": + elif str(f) == "\"-warnings-as-errors\"": gather.append("-warnings-as-errors") + elif str(f) == "\"-no-warnings-as-errors\"": + gather.append("-no-warnings-as-errors") if ctx.attrs.enable_cxx_interop: - gather += ["-Xfrontend", "-enable-cxx-interop"] + gather += ["-cxx-interoperability-mode=default"] if ctx.attrs.swift_version != None: gather += ["-swift-version", ctx.attrs.swift_version] @@ -206,7 +226,7 @@ def compile_swift( # If a target exports ObjC headers and Swift explicit modules are enabled, # we need to precompile a PCM of the underlying module and supply it to the Swift compilation. - if objc_modulemap_pp_info and ctx.attrs.uses_explicit_modules: + if objc_modulemap_pp_info and uses_explicit_modules(ctx): underlying_swift_pcm_uncompiled_info = get_swift_pcm_uncompile_info( ctx, None, @@ -256,15 +276,25 @@ def compile_swift( output_header = ctx.actions.declare_output(module_name + "-Swift.h") output_swiftmodule = ctx.actions.declare_output(module_name + SWIFTMODULE_EXTENSION) - if toolchain.can_toolchain_emit_obj_c_header_textually: - _compile_swiftmodule(ctx, toolchain, shared_flags, srcs, output_swiftmodule, output_header) - else: - unprocessed_header = ctx.actions.declare_output(module_name + "-SwiftUnprocessed.h") - _compile_swiftmodule(ctx, toolchain, shared_flags, srcs, output_swiftmodule, unprocessed_header) - _perform_swift_postprocessing(ctx, module_name, unprocessed_header, output_header) + swift_framework_output = None + if _should_compile_with_evolution(ctx): + swift_framework_output = SwiftLibraryForDistributionOutput( + swiftinterface = ctx.actions.declare_output(module_name + ".swiftinterface"), + private_swiftinterface = ctx.actions.declare_output(module_name + ".private.swiftinterface"), + swiftdoc = ctx.actions.declare_output(module_name + ".swiftdoc"), #this is generated automatically once we pass -emit-module-info, so must have this name + ) + + output_symbols = None + + if cxx_use_shlib_intfs_mode(ctx, ShlibInterfacesMode("stub_from_headers")): + output_symbols = ctx.actions.declare_output("__tbd__/" + module_name + ".swift_symbols.txt") + + _compile_swiftmodule(ctx, toolchain, shared_flags, srcs, output_swiftmodule, output_header, output_symbols, swift_framework_output) object_output = _compile_object(ctx, toolchain, shared_flags, srcs) + index_stores = _compile_index_stores(ctx, toolchain, shared_flags, srcs) + # Swift libraries extend the ObjC modulemaps to include the -Swift.h header modulemap_pp_info = preprocessor_info_for_modulemap(ctx, "swift-extended", exported_headers, output_header) exported_swift_header = CHeader( @@ -276,8 +306,7 @@ def compile_swift( exported_pp_info = CPreprocessor( headers = [exported_swift_header], modular_args = modulemap_pp_info.modular_args, - relative_args = CPreprocessorArgs(args = modulemap_pp_info.relative_args.args), - absolute_args = CPreprocessorArgs(args = modulemap_pp_info.absolute_args.args), + args = CPreprocessorArgs(args = modulemap_pp_info.args.args), modulemap_path = modulemap_pp_info.modulemap_path, ) @@ -296,39 +325,20 @@ def compile_swift( object_files = object_output.object_files, object_format = 
toolchain.object_format, swiftmodule = output_swiftmodule, - dependency_info = get_swift_dependency_info(ctx, exported_pp_info, output_swiftmodule, deps_providers), + dependency_info = get_swift_dependency_info(ctx, output_swiftmodule, deps_providers), pre = pre, exported_pre = exported_pp_info, + exported_swift_header = exported_swift_header.artifact, argsfiles = object_output.argsfiles, swift_debug_info = extract_and_merge_swift_debug_infos(ctx, deps_providers, [output_swiftmodule]), clang_debug_info = extract_and_merge_clang_debug_infos(ctx, deps_providers), - compilation_database = _create_compilation_database(ctx, srcs, object_output.argsfiles.absolute[SWIFT_EXTENSION]), + compilation_database = _create_compilation_database(ctx, srcs, object_output.argsfiles.relative[SWIFT_EXTENSION]), + exported_symbols = output_symbols, + swift_library_for_distribution_output = swift_framework_output, + index_stores = index_stores, + swiftdeps = object_output.swiftdeps, ), swift_interface_info) -# Swift headers are postprocessed to make them compatible with Objective-C -# compilation that does not use -fmodules. This is a workaround for the bad -# performance of -fmodules without Explicit Modules, once Explicit Modules is -# supported, this postprocessing should be removed. -def _perform_swift_postprocessing( - ctx: AnalysisContext, - module_name: str, - unprocessed_header: Artifact, - output_header: Artifact): - transitive_exported_headers = { - module: module_exported_headers - for exported_headers_map in _get_exported_headers_tset(ctx).traverse() - if exported_headers_map - for module, module_exported_headers in exported_headers_map.items() - } - deps_json = ctx.actions.write_json(module_name + "-Deps.json", transitive_exported_headers) - postprocess_cmd = cmd_args(ctx.attrs._apple_tools[AppleToolsInfo].swift_objc_header_postprocess) - postprocess_cmd.add([ - unprocessed_header, - deps_json, - output_header.as_output(), - ]) - ctx.actions.run(postprocess_cmd, category = "swift_objc_header_postprocess") - # We use separate actions for swiftmodule and object file output. 
 # This improves build parallelism at the cost of duplicated work, but by disabling
 # type checking in function bodies the swiftmodule compilation can be done much
@@ -339,14 +349,27 @@ def _compile_swiftmodule(
         shared_flags: cmd_args,
         srcs: list[CxxSrcWithFlags],
         output_swiftmodule: Artifact,
-        output_header: Artifact) -> CompileArgsfiles:
+        output_header: Artifact,
+        output_symbols: Artifact | None,
+        swift_framework_output: SwiftLibraryForDistributionOutput | None) -> CompileArgsfiles:
     argfile_cmd = cmd_args(shared_flags)
     argfile_cmd.add([
+        "-disable-cmo",
         "-emit-module",
-        "-Xfrontend",
-        "-experimental-skip-non-inlinable-function-bodies-without-types",
+        "-experimental-emit-module-separately",
+        "-wmo",
     ])
 
+    if ctx.attrs.swift_module_skip_function_bodies:
+        argfile_cmd.add([
+            "-Xfrontend",
+            "-experimental-skip-non-inlinable-function-bodies-without-types",
+        ])
+
+    if _should_compile_with_evolution(ctx):
+        argfile_cmd.add(["-enable-library-evolution"])
+        argfile_cmd.add(["-emit-module-interface"])
+
     cmd = cmd_args([
         "-emit-objc-header",
         "-emit-objc-header-path",
@@ -355,18 +378,41 @@
         output_swiftmodule.as_output(),
     ])
 
-    if should_build_swift_incrementally(ctx, len(srcs)):
-        incremental_compilation_output = get_incremental_swiftmodule_compilation_flags(ctx, srcs)
-        cmd.add(incremental_compilation_output.incremental_flags_cmd)
-        argfile_cmd.add([
-            "-experimental-emit-module-separately",
+    if swift_framework_output:
+        # this is generated implicitly once we pass -emit-module
+        cmd.add(cmd_args(hidden = swift_framework_output.swiftdoc.as_output()))
+        cmd.add([
+            "-emit-parseable-module-interface-path",
+            swift_framework_output.swiftinterface.as_output(),
+            "-emit-private-module-interface-path",
+            swift_framework_output.private_swiftinterface.as_output(),
         ])
-    else:
-        argfile_cmd.add([
-            "-wmo",
+
+    output_tbd = None
+    if output_symbols != None:
+        # Two-step process: first we need to emit the TBD
+        output_tbd = ctx.actions.declare_output("__tbd__/" + ctx.attrs.name + "-Swift.tbd")
+        cmd.add([
+            "-emit-tbd",
+            "-emit-tbd-path",
+            output_tbd.as_output(),
+        ])
+
+    ret = _compile_with_argsfile(ctx, "swiftmodule_compile", SWIFTMODULE_EXTENSION, argfile_cmd, srcs, cmd, toolchain, num_threads = 1)
+
+    if output_tbd != None:
+        # Now that we have run the TBD action, we need to extract the symbols
+        extract_cmd = cmd_args([
+            get_cxx_toolchain_info(ctx).linker_info.mk_shlib_intf[RunInfo],
+            "extract",
+            "-o",
+            output_symbols.as_output(),
+            "--tbd",
+            output_tbd,
         ])
+        ctx.actions.run(extract_cmd, category = "extract_tbd_symbols", error_handler = apple_build_error_handler)
 
-    return _compile_with_argsfile(ctx, "swiftmodule_compile", SWIFTMODULE_EXTENSION, argfile_cmd, srcs, cmd, toolchain)
+    return ret
 
 def _compile_object(
         ctx: AnalysisContext,
@@ -375,11 +421,15 @@
         srcs: list[CxxSrcWithFlags]) -> SwiftObjectOutput:
     if should_build_swift_incrementally(ctx, len(srcs)):
         incremental_compilation_output = get_incremental_object_compilation_flags(ctx, srcs)
+        num_threads = incremental_compilation_output.num_threads
         output_map_artifact = incremental_compilation_output.output_map_artifact
         objects = incremental_compilation_output.artifacts
        cmd = incremental_compilation_output.incremental_flags_cmd
+        swiftdeps = incremental_compilation_output.swiftdeps
     else:
+        num_threads = 1
         output_map_artifact = None
+        swiftdeps = []
         output_object = ctx.actions.declare_output(get_module_name(ctx) + ".o")
         objects = [output_object]
         object_format = toolchain.object_format.value
@@ -398,14 +448,69 @@
     if embed_bitcode:
         cmd.add("--embed-bitcode")
 
-    argsfiles = _compile_with_argsfile(ctx, "swift_compile", SWIFT_EXTENSION, shared_flags, srcs, cmd, toolchain)
+    if _should_compile_with_evolution(ctx):
+        cmd.add(["-enable-library-evolution"])
+
+    argsfiles = _compile_with_argsfile(ctx, "swift_compile", SWIFT_EXTENSION, shared_flags, srcs, cmd, toolchain, num_threads = num_threads)
 
     return SwiftObjectOutput(
         object_files = objects,
         argsfiles = argsfiles,
         output_map_artifact = output_map_artifact,
+        swiftdeps = swiftdeps,
     )
 
+def _compile_index_stores(
+        ctx: AnalysisContext,
+        toolchain: SwiftToolchainInfo,
+        shared_flags: cmd_args,
+        srcs: list[CxxSrcWithFlags]) -> list[Artifact]:
+    index_stores = []
+    for src in srcs:
+        additional_flags = cmd_args()
+
+        # With the -index-file flag, swiftc will not go through all phases of the compiler
+        # and will not output anything except the index data
+        # The output here is only used for the identifier of the index unit file
+        # The output path is used for computing the hash value in the unit file name
+        output_name = paths.join(
+            ctx.label.cell,
+            ctx.label.package,
+            ctx.label.name,
+            "{}.indexData".format(src.file.short_path),
+        )
+        additional_flags.add(["-o", output_name])
+
+        index_store_folder_name = paths.join("__indexstore__", get_module_name(ctx), src.file.short_path, "index_store")
+        index_store = ctx.actions.declare_output(index_store_folder_name, dir = True)
+
+        additional_flags.add([
+            "-index-file",
+            "-index-ignore-system-modules",
+            "-index-store-path",
+            index_store.as_output(),
+        ])
+
+        # -index-file-path can only accept one file, so we need to build index data for each source file
+        additional_flags.add([
+            "-index-file-path",
+            src.file,
+        ])
+
+        _compile_with_argsfile(
+            ctx,
+            "swift_index_compile",
+            index_store_folder_name,
+            shared_flags,
+            srcs,
+            additional_flags,
+            toolchain,
+            index_store_folder_name,
+        )
+        index_stores.append(index_store)
+
+    return index_stores
+
 def _compile_with_argsfile(
         ctx: AnalysisContext,
         category_prefix: str,
@@ -413,11 +518,13 @@ def _compile_with_argsfile(
         shared_flags: cmd_args,
         srcs: list[CxxSrcWithFlags],
         additional_flags: cmd_args,
-        toolchain: SwiftToolchainInfo) -> CompileArgsfiles:
+        toolchain: SwiftToolchainInfo,
+        identifier: str | None = None,
+        num_threads: int = 1) -> CompileArgsfiles:
     shell_quoted_args = cmd_args(shared_flags, quote = "shell")
-    argsfile, _ = ctx.actions.write(extension + ".argsfile", shell_quoted_args, allow_args = True)
+    argsfile, _ = ctx.actions.write(extension + "_compile_argsfile", shell_quoted_args, allow_args = True)
     input_args = [shared_flags]
-    cmd_form = cmd_args(cmd_args(argsfile, format = "@{}", delimiter = "")).hidden(input_args)
+    cmd_form = cmd_args(cmd_args(argsfile, format = "@{}", delimiter = ""), hidden = input_args)
     cmd_form.add([s.file for s in srcs])
 
     cmd = cmd_args(toolchain.compiler)
@@ -428,20 +535,48 @@ def _compile_with_argsfile(
     # so that CI builds populate caches used by developer machines.
     explicit_modules_enabled = uses_explicit_modules(ctx)
 
+    build_swift_incrementally = should_build_swift_incrementally(ctx, len(srcs))
+
+    # When Swift code is built incrementally, the swift-driver embeds absolute paths into the artifacts.
+    # Unfortunately, this compels us to execute these actions locally.
+    run_extra_args = {
+        # Even though the incremental artifacts (`.priors`, `.swiftdeps`) contain abs paths and
+        # are not cacheable, it's actually fine to still upload to the cache.
This is because + # the downside of cached incremental artifacts with abs paths is that it will perform + # a full module compile on the first source change in a module (or any of its transitive + # deps where the public API changes). But this is exactly what would happen if we did not + # allow any caching at all - instead, every cold build would have to rebuild *everything* + # as there will be zero caching (as all incremental actions must run locally and do not + # allow cache upload). + # + # Thus, by allowing cache uploads, we get cold build caching, even if we end up caching + # non-hermetic Swift incremental artifacts. It's just that those non-hermetic artifacts + # do not result in further build perf efficiency later on when modules need to be recompiled. + "allow_cache_upload": True, + } + if build_swift_incrementally and not toolchain.supports_relative_resource_dir: + # When adding -working-directory= we end up with absolute paths in the + # swiftdeps files. + run_extra_args["local_only"] = True + else: + # Swift compilation on RE without explicit modules is impractically expensive + # because there's no shared module cache across different libraries. + run_extra_args["prefer_local"] = not explicit_modules_enabled + # Make it easier to debug whether Swift actions get compiled with explicit modules or not category = category_prefix + ("_with_explicit_mods" if explicit_modules_enabled else "") ctx.actions.run( cmd, category = category, - # Swift compilation on RE without explicit modules is impractically expensive - # because there's no shared module cache across different libraries. - prefer_local = not explicit_modules_enabled, - allow_cache_upload = True, + identifier = identifier, # When building incrementally, we need to preserve local state between invocations. - no_outputs_cleanup = should_build_swift_incrementally(ctx, len(srcs)), + no_outputs_cleanup = build_swift_incrementally, + error_handler = apple_build_error_handler, + weight = num_threads, + **run_extra_args ) - relative_argsfile = CompileArgsfile( + argsfile = CompileArgsfile( file = argsfile, cmd_form = cmd_form, input_args = input_args, @@ -449,8 +584,13 @@ def _compile_with_argsfile( args_without_file_prefix_args = shared_flags, ) - # Swift correctly handles relative paths and we can utilize the relative argsfile for absolute paths. - return CompileArgsfiles(relative = {extension: relative_argsfile}, absolute = {extension: relative_argsfile}) + # Swift correctly handles relative paths and we can utilize the relative argsfile for Xcode. + return CompileArgsfiles(relative = {extension: argsfile}, xcode = {extension: argsfile}) + +def _get_serialize_debugging_options_attr_value(ctx: AnalysisContext): + if ctx.attrs.serialize_debugging_options == None: + return True + return ctx.attrs.serialize_debugging_options def _get_shared_flags( ctx: AnalysisContext, @@ -471,13 +611,15 @@ def _get_shared_flags( cmd.add(["-working-directory="]) cmd.add([ + # Always use color, consistent with clang. + "-color-diagnostics", # Unset the working directory in the debug information. 
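Summarizing the execution policy `_compile_with_argsfile` assembles above: cache uploads are always allowed, incremental builds on toolchains without relative-resource-dir support must run locally, and otherwise local execution is preferred only when explicit modules are off. A plain-Python restatement of that decision table; names mirror the Starlark, the helper itself is hypothetical:

```python
def execution_policy(incremental, supports_relative_resource_dir, explicit_modules):
    # Mirrors run_extra_args in _compile_with_argsfile.
    extra = {"allow_cache_upload": True}
    if incremental and not supports_relative_resource_dir:
        # -working-directory= bakes absolute paths into swiftdeps files.
        extra["local_only"] = True
    else:
        # Without explicit modules there is no shared module cache on RE.
        extra["prefer_local"] = not explicit_modules
    return extra

assert execution_policy(True, False, True) == {"allow_cache_upload": True, "local_only": True}
assert execution_policy(False, False, False) == {"allow_cache_upload": True, "prefer_local": True}
assert execution_policy(False, False, True) == {"allow_cache_upload": True, "prefer_local": False}
```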
"-file-compilation-dir", ".", "-sdk", toolchain.sdk_path, "-target", - get_versioned_target_triple(ctx), + get_target_triple(ctx), "-module-name", module_name, "-Xfrontend", @@ -489,7 +631,14 @@ def _get_shared_flags( "-parse-as-library", ]) - if uses_explicit_modules(ctx): + if ctx.attrs.swift_package_name != None: + cmd.add([ + "-package-name", + ctx.attrs.swift_package_name, + ]) + + explicit_modules_enabled = uses_explicit_modules(ctx) + if explicit_modules_enabled: cmd.add([ "-Xcc", "-Xclang", @@ -522,7 +671,7 @@ def _get_shared_flags( else: cmd.add(["-enable-experimental-cxx-interop"]) - serialize_debugging_options = ctx.attrs.serialize_debugging_options and not objc_headers and toolchain.prefix_serialized_debugging_options + serialize_debugging_options = _get_serialize_debugging_options_attr_value(ctx) and (not explicit_modules_enabled) and (not objc_headers) and toolchain.prefix_serialized_debugging_options if serialize_debugging_options: cmd.add([ "-Xfrontend", @@ -548,7 +697,7 @@ def _get_shared_flags( "-disable-cxx-interop-requirement-at-import", ]) - if toolchain.supports_swift_importing_objc_forward_declarations and ctx.attrs.import_obj_c_forward_declarations: + if toolchain.supports_swift_importing_objc_forward_declarations and ctx.attrs.import_obj_c_forward_declarations and ctx.attrs.swift_version != "6": cmd.add([ "-Xfrontend", "-enable-upcoming-feature", @@ -557,12 +706,20 @@ def _get_shared_flags( ]) pcm_deps_tset = get_compiled_pcm_deps_tset(ctx, deps_providers) - sdk_clang_deps_tset = get_compiled_sdk_clang_deps_tset(ctx, deps_providers) - sdk_swift_deps_tset = get_compiled_sdk_swift_deps_tset(ctx, deps_providers) - # Add flags required to import ObjC module dependencies - _add_clang_deps_flags(ctx, pcm_deps_tset, sdk_clang_deps_tset, cmd) - _add_swift_deps_flags(ctx, sdk_swift_deps_tset, cmd) + # If Swift Explicit Modules are enabled, a few things must be provided to a compilation job: + # 1. Direct and transitive SDK deps from `sdk_modules` attribute. + # 2. Direct and transitive user-defined deps. + # 3. Transitive SDK deps of user-defined deps. + # (This is the case, when a user-defined dep exports a type from SDK module, + # thus such SDK module should be implicitly visible to consumers of that custom dep) + if uses_explicit_modules(ctx): + sdk_clang_deps_tset = get_compiled_sdk_clang_deps_tset(ctx, deps_providers) + sdk_swift_deps_tset = get_compiled_sdk_swift_deps_tset(ctx, deps_providers) + _add_swift_module_map_args(ctx, sdk_swift_deps_tset, pcm_deps_tset, sdk_clang_deps_tset, cmd) + + _add_clang_deps_flags(ctx, pcm_deps_tset, cmd) + _add_swift_deps_flags(ctx, cmd) # Add flags for importing the ObjC part of this library _add_mixed_library_flags_to_cmd(ctx, cmd, underlying_module, objc_headers, objc_modulemap_pp_info) @@ -574,28 +731,34 @@ def _get_shared_flags( return cmd -def _add_swift_deps_flags( +def _add_swift_module_map_args( ctx: AnalysisContext, + sdk_swiftmodule_deps_tset: SwiftCompiledModuleTset, + pcm_deps_tset: SwiftCompiledModuleTset, sdk_deps_tset: SwiftCompiledModuleTset, cmd: cmd_args): - # If Explicit Modules are enabled, a few things must be provided to a compilation job: - # 1. Direct and transitive SDK deps from `sdk_modules` attribute. - # 2. Direct and transitive user-defined deps. - # 3. Transitive SDK deps of user-defined deps. 
- # (This is the case, when a user-defined dep exports a type from SDK module, - # thus such SDK module should be implicitly visible to consumers of that custom dep) + module_name = get_module_name(ctx) + sdk_swiftmodule_deps_tset = [sdk_swiftmodule_deps_tset] if sdk_swiftmodule_deps_tset else [] + all_deps_tset = ctx.actions.tset( + SwiftCompiledModuleTset, + children = _get_swift_paths_tsets(ctx.attrs.deps + ctx.attrs.exported_deps) + [pcm_deps_tset, sdk_deps_tset] + sdk_swiftmodule_deps_tset, + ) + swift_module_map_artifact = write_swift_module_map_with_deps( + ctx, + module_name, + all_deps_tset, + ) + cmd.add([ + "-Xfrontend", + "-explicit-swift-module-map-file", + "-Xfrontend", + swift_module_map_artifact, + ]) + +def _add_swift_deps_flags( + ctx: AnalysisContext, + cmd: cmd_args): if uses_explicit_modules(ctx): - module_name = get_module_name(ctx) - swift_deps_tset = ctx.actions.tset( - SwiftCompiledModuleTset, - children = _get_swift_paths_tsets(ctx.attrs.deps + ctx.attrs.exported_deps), - ) - swift_module_map_artifact = write_swift_module_map_with_swift_deps( - ctx, - module_name, - sdk_deps_tset, - swift_deps_tset, - ) cmd.add([ "-Xcc", "-fno-implicit-modules", @@ -603,10 +766,6 @@ def _add_swift_deps_flags( "-fno-implicit-module-maps", "-Xfrontend", "-disable-implicit-swift-modules", - "-Xfrontend", - "-explicit-swift-module-map-file", - "-Xfrontend", - swift_module_map_artifact, ]) else: depset = ctx.actions.tset(SwiftCompiledModuleTset, children = _get_swift_paths_tsets(ctx.attrs.deps + ctx.attrs.exported_deps)) @@ -615,15 +774,9 @@ def _add_swift_deps_flags( def _add_clang_deps_flags( ctx: AnalysisContext, pcm_deps_tset: SwiftCompiledModuleTset, - sdk_deps_tset: SwiftCompiledModuleTset, cmd: cmd_args) -> None: - # If a module uses Explicit Modules, all direct and - # transitive Clang deps have to be explicitly added. if uses_explicit_modules(ctx): - cmd.add(pcm_deps_tset.project_as_args("clang_deps")) - - # Add Clang sdk modules which do not go to swift modulemap - cmd.add(sdk_deps_tset.project_as_args("clang_deps")) + cmd.add(pcm_deps_tset.project_as_args("clang_importer_flags")) else: inherited_preprocessor_infos = cxx_inherited_preprocessor_infos(ctx.attrs.deps + ctx.attrs.exported_deps) preprocessors = cxx_merge_cpreprocessors(ctx, [], inherited_preprocessor_infos) @@ -640,6 +793,7 @@ def _add_mixed_library_flags_to_cmd( if uses_explicit_modules(ctx): if underlying_module: cmd.add(underlying_module.clang_importer_args) + cmd.add(underlying_module.clang_module_file_args) cmd.add("-import-underlying-module") return @@ -651,7 +805,7 @@ def _add_mixed_library_flags_to_cmd( # the debugger as they require absolute paths. Instead we will enforce # that mixed libraries do not have serialized debugging info and rely on # rdeps to serialize the correct paths. 
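Stepping back to `_add_swift_module_map_args` above: the merged dependency tset is projected to JSON and handed to the frontend via `-explicit-swift-module-map-file`. The shape below is illustrative only, following swift-driver's explicit module map format as I understand it; the actual serialization comes from the tset's `swift_module_map` JSON projection, which this hunk does not show:

```python
import json

# Assumed shape of an explicit swift module map: one entry per compiled
# dependency, Swift modules by swiftmodule path, Clang modules by PCM
# and modulemap paths. Module names and paths here are hypothetical.
entries = [
    {"moduleName": "Swift", "modulePath": "Swift.swiftmodule", "isFramework": False},
    {"moduleName": "MyDep", "modulePath": "MyDep.swiftmodule", "isFramework": False},
    {
        "moduleName": "MyClangDep",
        "clangModulePath": "MyClangDep.pcm",
        "clangModuleMapPath": "MyClangDep/module.modulemap",
    },
]
print(json.dumps(entries, indent = 2))
```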
- for arg in objc_modulemap_pp_info.relative_args.args: + for arg in objc_modulemap_pp_info.args.args: cmd.add("-Xcc") cmd.add(arg) @@ -675,17 +829,6 @@ def _get_external_debug_info_tsets(deps: list[Dependency]) -> list[ArtifactTSet] if SwiftDependencyInfo in d ] -def _get_exported_headers_tset(ctx: AnalysisContext, exported_headers: [list[str], None] = None) -> ExportedHeadersTSet: - return ctx.actions.tset( - ExportedHeadersTSet, - value = {get_module_name(ctx): exported_headers} if exported_headers else None, - children = [ - dep.exported_headers - for dep in [x.get(SwiftDependencyInfo) for x in _exported_deps(ctx)] - if dep and dep.exported_headers - ], - ) - def get_swift_pcm_uncompile_info( ctx: AnalysisContext, propagated_exported_preprocessor_info: [CPreprocessorInfo, None], @@ -706,18 +849,10 @@ def get_swift_pcm_uncompile_info( def get_swift_dependency_info( ctx: AnalysisContext, - exported_pre: [CPreprocessor, None], - output_module: [Artifact, None], + output_module: Artifact | None, deps_providers: list) -> SwiftDependencyInfo: exported_deps = _exported_deps(ctx) - # We only need to pass up the exported_headers for Swift header post-processing. - # If the toolchain can emit textual imports already then we skip the extra work. - exported_headers = [] - if not ctx.attrs._apple_toolchain[AppleToolchainInfo].swift_toolchain_info.can_toolchain_emit_obj_c_header_textually: - exported_headers = [_header_basename(header) for header in ctx.attrs.exported_headers] - exported_headers += [header.name for header in exported_pre.headers] if exported_pre else [] - # We pass through the SDK swiftmodules here to match Buck 1 behaviour. This is # pretty loose, but it matches Buck 1 behavior so cannot be improved until # migration is complete. @@ -725,6 +860,7 @@ def get_swift_dependency_info( if output_module: compiled_info = SwiftCompiledModuleInfo( is_framework = False, + is_sdk_module = False, is_swiftmodule = True, module_name = get_module_name(ctx), output_artifact = output_module, @@ -738,20 +874,14 @@ def get_swift_dependency_info( artifacts = [output_module] if output_module != None else [], children = _get_external_debug_info_tsets(ctx.attrs.deps + ctx.attrs.exported_deps), label = ctx.label, + tags = [ArtifactInfoTag("swiftmodule")], ) return SwiftDependencyInfo( debug_info_tset = debug_info_tset, - exported_headers = _get_exported_headers_tset(ctx, exported_headers), exported_swiftmodules = exported_swiftmodules, ) -def _header_basename(header: [Artifact, str]) -> str: - if type(header) == type(""): - return paths.basename(header) - else: - return header.basename - def uses_explicit_modules(ctx: AnalysisContext) -> bool: swift_toolchain = ctx.attrs._apple_toolchain[AppleToolchainInfo].swift_toolchain_info return ctx.attrs.uses_explicit_modules and is_sdk_modules_provided(swift_toolchain) @@ -794,7 +924,7 @@ def get_swift_debug_infos( ctx: AnalysisContext, swift_dependency_info: [SwiftDependencyInfo, None], swift_output: [SwiftCompilationOutput, None]) -> SwiftDebugInfo: - # When determing the debug info for shared libraries, if the shared library is a link group, we rely on the link group links to + # When determining the debug info for shared libraries, if the shared library is a link group, we rely on the link group links to # obtain the debug info for linked libraries and only need to provide any swift debug info for this library itself. Otherwise # if linking standard shared, we need to obtain the transitive debug info. 
if get_link_group(ctx): @@ -835,7 +965,12 @@ def _create_compilation_database( cmd.add("--") cmd.add(argfile.cmd_form) - ctx.actions.run(cmd, category = "swift_compilation_database", identifier = identifier) + ctx.actions.run( + cmd, + category = "swift_compilation_database", + identifier = identifier, + error_handler = apple_build_error_handler, + ) return SwiftCompilationDatabase(db = cdb_artifact, other_outputs = argfile.cmd_form) @@ -846,10 +981,10 @@ def _create_swift_interface(ctx: AnalysisContext, shared_flags: cmd_args, module return DefaultInfo() mk_swift_interface = swift_toolchain.mk_swift_interface - identifier = module_name + ".interface.swift" + identifier = module_name + ".swift_interface" argsfile, _ = ctx.actions.write( - identifier + ".argsfile", + identifier + "_argsfile", shared_flags, allow_args = True, ) @@ -864,13 +999,14 @@ def _create_swift_interface(ctx: AnalysisContext, shared_flags: cmd_args, module "--out", interface_artifact.as_output(), "--", - cmd_args(cmd_args(argsfile, format = "@{}", delimiter = "")).hidden([shared_flags]), + cmd_args(cmd_args(argsfile, format = "@{}", delimiter = ""), hidden = [shared_flags]), ) ctx.actions.run( mk_swift_args, category = "mk_swift_interface", identifier = identifier, + error_handler = apple_build_error_handler, ) return DefaultInfo( @@ -885,3 +1021,8 @@ def _exported_deps(ctx) -> list[Dependency]: return ctx.attrs.exported_deps + ctx.attrs.deps else: return ctx.attrs.exported_deps + +def _should_compile_with_evolution(ctx) -> bool: + if ctx.attrs.enable_library_evolution != None: + return ctx.attrs.enable_library_evolution + return ctx.attrs._enable_library_evolution diff --git a/prelude/apple/swift/swift_debug_info_utils.bzl b/prelude/apple/swift/swift_debug_info_utils.bzl index 828f118fdf1..50e9d26adef 100644 --- a/prelude/apple/swift/swift_debug_info_utils.bzl +++ b/prelude/apple/swift/swift_debug_info_utils.bzl @@ -7,6 +7,7 @@ load( "@prelude//:artifact_tset.bzl", + "ArtifactInfoTag", "ArtifactTSet", "make_artifact_tset", ) @@ -27,6 +28,7 @@ def extract_and_merge_swift_debug_infos(ctx: AnalysisContext, compiled_pcm_deps_ label = ctx.label, artifacts = artifacts, children = swift_debug_tsets, + tags = [ArtifactInfoTag("swiftmodule")], ) def extract_and_merge_clang_debug_infos(ctx: AnalysisContext, compiled_pcm_deps_providers, artifacts: list[Artifact] = []) -> ArtifactTSet: @@ -41,4 +43,5 @@ def extract_and_merge_clang_debug_infos(ctx: AnalysisContext, compiled_pcm_deps_ label = ctx.label, artifacts = artifacts, children = clang_debug_tsets, + tags = [ArtifactInfoTag("swift_pcm")], ) diff --git a/prelude/apple/swift/swift_incremental_support.bzl b/prelude/apple/swift/swift_incremental_support.bzl index e0f8cf98be8..452892714a8 100644 --- a/prelude/apple/swift/swift_incremental_support.bzl +++ b/prelude/apple/swift/swift_incremental_support.bzl @@ -10,14 +10,13 @@ load("@prelude//apple:apple_utility.bzl", "get_module_name") load("@prelude//apple/swift:swift_toolchain_types.bzl", "SwiftObjectFormat") load("@prelude//apple/swift:swift_types.bzl", "SwiftCompilationModes") load( - "@prelude//cxx:compile.bzl", - "CxxSrcWithFlags", + "@prelude//cxx:cxx_sources.bzl", + "CxxSrcWithFlags", # @unused Used as a type ) _WriteOutputFileMapOutput = record( artifacts = field(list[Artifact]), swiftdeps = field(list[Artifact]), - main_swiftdeps = field(Artifact), output_map_artifact = field(Artifact), ) @@ -25,11 +24,21 @@ IncrementalCompilationOutput = record( incremental_flags_cmd = field(cmd_args), artifacts = field(list[Artifact]), 
 output_map_artifact = field(Artifact),
+    num_threads = field(int),
+    swiftdeps = field(list[Artifact]),
 )
 
 SwiftCompilationMode = enum(*SwiftCompilationModes)
 
-SwiftIncrementalBuildFilesTreshold = 20
+_INCREMENTAL_SRC_THRESHOLD = 20
+
+# The maximum number of threads. We don't use
+# host_info to prevent cache misses across
+# different hardware models.
+_MAX_NUM_THREADS = 4
+
+# The maximum number of srcs per parallel action
+_SRCS_PER_THREAD = 50
 
 def should_build_swift_incrementally(ctx: AnalysisContext, srcs_count: int) -> bool:
     toolchain = ctx.attrs._apple_toolchain[AppleToolchainInfo].swift_toolchain_info
@@ -43,39 +52,46 @@ def should_build_swift_incrementally(ctx: AnalysisContext, srcs_count: int) -> b
         return False
     elif mode == SwiftCompilationMode("incremental"):
         return True
-    return srcs_count >= SwiftIncrementalBuildFilesTreshold
+    return srcs_count >= _INCREMENTAL_SRC_THRESHOLD
 
 def get_incremental_object_compilation_flags(ctx: AnalysisContext, srcs: list[CxxSrcWithFlags]) -> IncrementalCompilationOutput:
     output_file_map = _write_output_file_map(ctx, get_module_name(ctx), srcs, "object", ".o")
-    return _get_incremental_compilation_flags_and_objects(output_file_map, cmd_args(["-emit-object"]))
+    return _get_incremental_compilation_flags_and_objects(output_file_map, len(srcs), cmd_args(["-emit-object"]))
 
-def get_incremental_swiftmodule_compilation_flags(ctx: AnalysisContext, srcs: list[CxxSrcWithFlags]) -> IncrementalCompilationOutput:
-    output_file_map = _write_output_file_map(ctx, get_module_name(ctx), srcs, "swiftmodule", ".swiftmodule")
-    return _get_incremental_compilation_flags_and_objects(output_file_map, cmd_args())
+def _get_incremental_num_threads(num_srcs: int) -> int:
+    if num_srcs == 0:
+        return 1
+
+    src_threads = (num_srcs + _SRCS_PER_THREAD - 1) // _SRCS_PER_THREAD
+    return min(_MAX_NUM_THREADS, src_threads)
 
 def _get_incremental_compilation_flags_and_objects(
         output_file_map: _WriteOutputFileMapOutput,
+        num_srcs: int,
         additional_flags: cmd_args) -> IncrementalCompilationOutput:
-    cmd = cmd_args([
-        "-incremental",
-        "-enable-incremental-imports",
-        "-disable-cmo", # To minimize changes in generated swiftmodule file.
-        "-enable-batch-mode",
-        "-driver-batch-count",
-        "1",
-        "-output-file-map",
-        output_file_map.output_map_artifact,
-    ])
-    cmd.add(additional_flags)
-
-    cmd = cmd.hidden([swiftdep.as_output() for swiftdep in output_file_map.swiftdeps])
-    cmd = cmd.hidden([artifact.as_output() for artifact in output_file_map.artifacts])
-    cmd = cmd.hidden(output_file_map.main_swiftdeps.as_output())
+    num_threads = _get_incremental_num_threads(num_srcs)
+    cmd = cmd_args(
+        [
+            "-incremental",
+            "-enable-incremental-imports",
+            "-disable-cmo", # To minimize changes in generated swiftmodule file.
+            "-enable-batch-mode",
+            "-output-file-map",
+            output_file_map.output_map_artifact,
+            "-j",
+            str(num_threads),
+            additional_flags,
+        ],
+        hidden = [swiftdep.as_output() for swiftdep in output_file_map.swiftdeps] +
+                 [artifact.as_output() for artifact in output_file_map.artifacts],
+    )
 
     return IncrementalCompilationOutput(
         incremental_flags_cmd = cmd,
         artifacts = output_file_map.artifacts,
         output_map_artifact = output_file_map.output_map_artifact,
+        num_threads = num_threads,
+        swiftdeps = output_file_map.swiftdeps,
     )
 
 def _write_output_file_map(
@@ -86,7 +102,6 @@ def _write_output_file_map(
         extension: str) -> _WriteOutputFileMapOutput:  # Either ".o" or ".swiftmodule"
     # swift-driver doesn't respect extension for root swiftdeps file and it always has to be `.priors`.
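A quick worked example of `_get_incremental_num_threads` above: the source count is ceiling-divided by `_SRCS_PER_THREAD` (50) and capped at `_MAX_NUM_THREADS` (4), with an empty module defaulting to one thread. The same arithmetic in plain Python:

```python
_MAX_NUM_THREADS = 4
_SRCS_PER_THREAD = 50

def incremental_num_threads(num_srcs):
    # Mirrors _get_incremental_num_threads: ceiling division, then cap.
    if num_srcs == 0:
        return 1
    src_threads = (num_srcs + _SRCS_PER_THREAD - 1) // _SRCS_PER_THREAD
    return min(_MAX_NUM_THREADS, src_threads)

assert incremental_num_threads(0) == 1
assert incremental_num_threads(50) == 1
assert incremental_num_threads(51) == 2
assert incremental_num_threads(1000) == 4  # capped at _MAX_NUM_THREADS
```

The returned count feeds both the driver's `-j` flag and the action's `weight`, so a big incremental module reserves proportionally more local execution slots.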
module_swiftdeps = ctx.actions.declare_output("module-build-record." + compilation_mode + ".priors") - output_file_map = { "": { "swift-dependencies": module_swiftdeps, @@ -94,7 +109,7 @@ def _write_output_file_map( } artifacts = [] - swiftdeps = [] + swiftdeps = [module_swiftdeps] for src in srcs: file_name = src.file.basename output_artifact = ctx.actions.declare_output(file_name + extension) @@ -113,6 +128,5 @@ def _write_output_file_map( return _WriteOutputFileMapOutput( artifacts = artifacts, swiftdeps = swiftdeps, - main_swiftdeps = module_swiftdeps, output_map_artifact = output_map_artifact, ) diff --git a/prelude/apple/swift/swift_module_map.bzl b/prelude/apple/swift/swift_module_map.bzl index 5547232f07d..dd60747cfe8 100644 --- a/prelude/apple/swift/swift_module_map.bzl +++ b/prelude/apple/swift/swift_module_map.bzl @@ -8,22 +8,10 @@ load("@prelude//utils:arglike.bzl", "ArgLike") # @unused Used as a type load(":swift_toolchain_types.bzl", "SwiftCompiledModuleTset") -def write_swift_module_map( +def write_swift_module_map_with_deps( ctx: AnalysisContext, module_name: str, - sdk_deps: SwiftCompiledModuleTset) -> ArgLike: - return write_swift_module_map_with_swift_deps(ctx, module_name, sdk_deps, None) - -def write_swift_module_map_with_swift_deps( - ctx: AnalysisContext, - module_name: str, - sdk_swift_deps: SwiftCompiledModuleTset, - swift_deps: [SwiftCompiledModuleTset, None]) -> ArgLike: - if swift_deps: - all_deps = ctx.actions.tset(SwiftCompiledModuleTset, children = [sdk_swift_deps, swift_deps]) - else: - all_deps = sdk_swift_deps - + all_deps: SwiftCompiledModuleTset) -> ArgLike: return ctx.actions.write_json( module_name + ".swift_module_map.json", all_deps.project_as_json("swift_module_map"), diff --git a/prelude/apple/swift/swift_pcm_compilation.bzl b/prelude/apple/swift/swift_pcm_compilation.bzl index 01626fce7eb..8a8079758b8 100644 --- a/prelude/apple/swift/swift_pcm_compilation.bzl +++ b/prelude/apple/swift/swift_pcm_compilation.bzl @@ -23,6 +23,8 @@ load(":swift_toolchain_types.bzl", "SwiftCompiledModuleInfo", "SwiftCompiledModu _REQUIRED_SDK_MODULES = ["Foundation"] +_REQUIRED_SDK_CXX_MODULES = _REQUIRED_SDK_MODULES + ["std"] + def get_compiled_pcm_deps_tset(ctx: AnalysisContext, pcm_deps_providers: list) -> SwiftCompiledModuleTset: pcm_deps = [ pcm_deps_provider[WrappedSwiftPCMCompiledInfo].tset @@ -38,6 +40,7 @@ def get_swift_pcm_anon_targets( deps = [ { "dep": uncompiled_dep, + "enable_cxx_interop": ctx.attrs.enable_cxx_interop, "name": uncompiled_dep.label, "swift_cxx_args": swift_cxx_args, "_apple_toolchain": ctx.attrs._apple_toolchain, @@ -54,16 +57,16 @@ def _compile_with_argsfile( args: cmd_args, additional_cmd: cmd_args): shell_quoted_cmd = cmd_args(args, quote = "shell") - argfile, _ = ctx.actions.write(module_name + ".pcm.argsfile", shell_quoted_cmd, allow_args = True) + argfile, _ = ctx.actions.write(module_name + ".swift_pcm_argsfile", shell_quoted_cmd, allow_args = True) swift_toolchain = ctx.attrs._apple_toolchain[AppleToolchainInfo].swift_toolchain_info - cmd = cmd_args(swift_toolchain.compiler) - cmd.add(cmd_args(["@", argfile], delimiter = "")) - - # Action should also depend on all artifacts from the argsfile, otherwise they won't be materialised. - cmd.hidden([args]) - - cmd.add(additional_cmd) + cmd = cmd_args( + swift_toolchain.compiler, + cmd_args(["@", argfile], delimiter = ""), + additional_cmd, + # Action should also depend on all artifacts from the argsfile, otherwise they won't be materialised. 
+ hidden = args, + ) ctx.actions.run( cmd, @@ -77,9 +80,9 @@ def _compiled_module_info( module_name: str, pcm_output: Artifact, pcm_info: SwiftPCMUncompiledInfo) -> SwiftCompiledModuleInfo: - clang_importer_args = cmd_args() - clang_importer_args.add("-Xcc") - clang_importer_args.add( + clang_deps_args = cmd_args() + clang_deps_args.add("-Xcc") + clang_deps_args.add( cmd_args( [ "-fmodule-file=", @@ -90,8 +93,8 @@ def _compiled_module_info( delimiter = "", ), ) - clang_importer_args.add("-Xcc") - clang_importer_args.add( + clang_deps_args.add("-Xcc") + clang_deps_args.add( cmd_args( [ "-fmodule-map-file=", @@ -100,16 +103,22 @@ def _compiled_module_info( delimiter = "", ), ) - clang_importer_args.add("-Xcc") - clang_importer_args.add(pcm_info.exported_preprocessor.relative_args.args) - clang_importer_args.hidden(pcm_info.exported_preprocessor.modular_args) + + clang_importer_args = cmd_args( + "-Xcc", + pcm_info.exported_preprocessor.args.args, + hidden = pcm_info.exported_preprocessor.modular_args, + ) return SwiftCompiledModuleInfo( + clang_module_file_args = clang_deps_args, clang_importer_args = clang_importer_args, is_framework = False, + is_sdk_module = False, is_swiftmodule = False, module_name = module_name, output_artifact = pcm_output, + clang_modulemap = pcm_info.exported_preprocessor.modulemap_path, ) def _swift_pcm_compilation_impl(ctx: AnalysisContext) -> [Promise, list[Provider]]: @@ -181,15 +190,17 @@ def _swift_pcm_compilation_impl(ctx: AnalysisContext) -> [Promise, list[Provider ), ] + required_sdk_modules = _REQUIRED_SDK_CXX_MODULES if ctx.attrs.enable_cxx_interop else _REQUIRED_SDK_MODULES direct_uncompiled_sdk_deps = get_uncompiled_sdk_deps( ctx.attrs.dep[SwiftPCMUncompiledInfo].uncompiled_sdk_modules, - _REQUIRED_SDK_MODULES, + required_sdk_modules, ctx.attrs._apple_toolchain[AppleToolchainInfo].swift_toolchain_info, ) # Recursively compiling SDK's Clang dependencies sdk_pcm_deps_anon_targets = get_swift_sdk_pcm_anon_targets( ctx, + ctx.attrs.enable_cxx_interop, direct_uncompiled_sdk_deps, ctx.attrs.swift_cxx_args, ) @@ -206,6 +217,7 @@ _swift_pcm_compilation = rule( impl = _swift_pcm_compilation_impl, attrs = { "dep": attrs.dep(), + "enable_cxx_interop": attrs.bool(), "swift_cxx_args": attrs.list(attrs.string(), default = []), "_apple_toolchain": attrs.dep(), }, @@ -240,7 +252,7 @@ def compile_underlying_pcm( "-Xcc", "-I", "-Xcc", - cmd_args([cmd_args(modulemap_path).parent(), "exported_symlink_tree"], delimiter = "/"), + cmd_args([cmd_args(modulemap_path, parent = 1), "exported_symlink_tree"], delimiter = "/"), ]) cmd.add(framework_search_path_flags) @@ -261,41 +273,38 @@ def _get_base_pcm_flags( pcm_deps_tset: SwiftCompiledModuleTset, swift_cxx_args: list[str]) -> (cmd_args, cmd_args, Artifact): swift_toolchain = ctx.attrs._apple_toolchain[AppleToolchainInfo].swift_toolchain_info + modulemap_path = uncompiled_pcm_info.exported_preprocessor.modulemap_path + pcm_output = ctx.actions.declare_output(module_name + ".pcm") - cmd = cmd_args() - cmd.add(get_shared_pcm_compilation_args(module_name)) - cmd.add(["-sdk", swift_toolchain.sdk_path]) - cmd.add(swift_toolchain.compiler_flags) - - if swift_toolchain.resource_dir: - cmd.add([ + cmd = cmd_args( + get_shared_pcm_compilation_args(module_name), + ["-sdk", swift_toolchain.sdk_path], + swift_toolchain.compiler_flags, + ([ "-resource-dir", swift_toolchain.resource_dir, - ]) - - cmd.add(sdk_deps_tset.project_as_args("clang_deps")) - cmd.add(pcm_deps_tset.project_as_args("clang_deps")) + ] if swift_toolchain.resource_dir 
else []), + sdk_deps_tset.project_as_args("clang_module_file_flags"), + pcm_deps_tset.project_as_args("clang_module_file_flags"), + pcm_deps_tset.project_as_args("clang_importer_flags"), + # To correctly resolve modulemap's headers, + # a search path to the root of modulemap should be passed. + [ + "-Xcc", + "-I", + "-Xcc", + cmd_args(modulemap_path, parent = 1), + ], + # Modular deps like `-Swift.h` have to be materialized. + hidden = uncompiled_pcm_info.exported_preprocessor.modular_args, + ) - modulemap_path = uncompiled_pcm_info.exported_preprocessor.modulemap_path - pcm_output = ctx.actions.declare_output(module_name + ".pcm") + cmd.add(swift_cxx_args) - additional_cmd = cmd_args(swift_cxx_args) - additional_cmd.add([ + additional_cmd = cmd_args( "-o", pcm_output.as_output(), modulemap_path, - ]) - - # To correctly resolve modulemap's headers, - # a search path to the root of modulemap should be passed. - cmd.add([ - "-Xcc", - "-I", - "-Xcc", - cmd_args(modulemap_path).parent(), - ]) - - # Modular deps like `-Swift.h` have to be materialized. - cmd.hidden(uncompiled_pcm_info.exported_preprocessor.modular_args) + ) return (cmd, additional_cmd, pcm_output) diff --git a/prelude/apple/swift/swift_sdk_pcm_compilation.bzl b/prelude/apple/swift/swift_sdk_pcm_compilation.bzl index c96606c8981..934a721f38a 100644 --- a/prelude/apple/swift/swift_sdk_pcm_compilation.bzl +++ b/prelude/apple/swift/swift_sdk_pcm_compilation.bzl @@ -90,6 +90,7 @@ def _add_sdk_module_search_path(cmd, uncompiled_sdk_module_info, apple_toolchain def get_swift_sdk_pcm_anon_targets( ctx: AnalysisContext, + enable_cxx_interop: bool, uncompiled_sdk_deps: list[Dependency], swift_cxx_args: list[str]): # We include the Swift deps here too as we need @@ -97,6 +98,7 @@ def get_swift_sdk_pcm_anon_targets( return [ (_swift_sdk_pcm_compilation, { "dep": module_dep, + "enable_cxx_interop": enable_cxx_interop, "name": module_dep.label, "swift_cxx_args": swift_cxx_args, "_apple_toolchain": ctx.attrs._apple_toolchain, @@ -143,7 +145,7 @@ def _swift_sdk_pcm_compilation_impl(ctx: AnalysisContext) -> [Promise, list[Prov "-I.", ]) - cmd.add(sdk_deps_tset.project_as_args("clang_deps")) + cmd.add(sdk_deps_tset.project_as_args("clang_module_file_flags")) expanded_modulemap_path_cmd = expand_relative_prefixed_sdk_path( cmd_args(swift_toolchain.sdk_path), @@ -172,6 +174,29 @@ def _swift_sdk_pcm_compilation_impl(ctx: AnalysisContext) -> [Promise, list[Prov cmd.add(ctx.attrs.swift_cxx_args) + if ctx.attrs.enable_cxx_interop: + # The stdlib headers have deprecation warnings set when targeting + # more recent versions. These warnings get serialized in the + # modules and make it impossible to import the std module, so + # suppress them during compilation instead. + cmd.add([ + "-Xcc", + "-D_LIBCPP_DISABLE_DEPRECATION_WARNINGS", + ]) + + if module_name == "Darwin": + # The Darwin module requires special handling with cxx interop + # to ensure that it does not include the c++ headers. The module + # is marked with [no_undeclared_includes] which will prevent + # including headers declared in other modulemaps. So that the + # cxx modules are visible we need to pass the module map path + # without the corresponding module file, which we cannot build + # until the Darwin module is available. 
+ cmd.add([ + "-Xcc", + cmd_args(swift_toolchain.sdk_path, format = "-fmodule-map-file={}/usr/include/c++/v1/module.modulemap"), + ]) + _add_sdk_module_search_path(cmd, uncompiled_sdk_module_info, apple_toolchain) ctx.actions.run( @@ -183,9 +208,9 @@ def _swift_sdk_pcm_compilation_impl(ctx: AnalysisContext) -> [Promise, list[Prov ) # Construct the args needed to be passed to the clang importer - clang_importer_args = cmd_args() - clang_importer_args.add("-Xcc") - clang_importer_args.add( + clang_deps_args = cmd_args() + clang_deps_args.add("-Xcc") + clang_deps_args.add( cmd_args( [ "-fmodule-file=", @@ -196,8 +221,8 @@ def _swift_sdk_pcm_compilation_impl(ctx: AnalysisContext) -> [Promise, list[Prov delimiter = "", ), ) - clang_importer_args.add("-Xcc") - clang_importer_args.add( + clang_deps_args.add("-Xcc") + clang_deps_args.add( cmd_args( [ "-fmodule-map-file=", @@ -208,11 +233,13 @@ def _swift_sdk_pcm_compilation_impl(ctx: AnalysisContext) -> [Promise, list[Prov ) compiled_sdk = SwiftCompiledModuleInfo( - clang_importer_args = clang_importer_args, + clang_module_file_args = clang_deps_args, is_framework = uncompiled_sdk_module_info.is_framework, + is_sdk_module = True, is_swiftmodule = False, module_name = module_name, output_artifact = pcm_output, + clang_modulemap = expanded_modulemap_path_cmd, ) return [ @@ -224,9 +251,11 @@ def _swift_sdk_pcm_compilation_impl(ctx: AnalysisContext) -> [Promise, list[Prov ] # Compile the transitive clang module deps of this target. + deps = ctx.attrs.dep[SdkUncompiledModuleInfo].cxx_deps if ctx.attrs.enable_cxx_interop else ctx.attrs.dep[SdkUncompiledModuleInfo].deps clang_module_deps = get_swift_sdk_pcm_anon_targets( ctx, - ctx.attrs.dep[SdkUncompiledModuleInfo].deps, + ctx.attrs.enable_cxx_interop, + deps, ctx.attrs.swift_cxx_args, ) @@ -236,6 +265,7 @@ _swift_sdk_pcm_compilation = rule( impl = _swift_sdk_pcm_compilation_impl, attrs = { "dep": attrs.dep(), + "enable_cxx_interop": attrs.bool(), "swift_cxx_args": attrs.list(attrs.string(), default = []), "_apple_toolchain": attrs.dep(), }, diff --git a/prelude/apple/swift/swift_sdk_swiftinterface_compilation.bzl b/prelude/apple/swift/swift_sdk_swiftinterface_compilation.bzl index 20667ddaf8a..48860952d8b 100644 --- a/prelude/apple/swift/swift_sdk_swiftinterface_compilation.bzl +++ b/prelude/apple/swift/swift_sdk_swiftinterface_compilation.bzl @@ -14,7 +14,7 @@ load( "extract_and_merge_clang_debug_infos", "extract_and_merge_swift_debug_infos", ) -load(":swift_module_map.bzl", "write_swift_module_map") +load(":swift_module_map.bzl", "write_swift_module_map_with_deps") load(":swift_sdk_pcm_compilation.bzl", "get_swift_sdk_pcm_anon_targets") load(":swift_toolchain_types.bzl", "SdkUncompiledModuleInfo", "SwiftCompiledModuleInfo", "SwiftCompiledModuleTset", "WrappedSdkCompiledModuleInfo") @@ -52,12 +52,12 @@ def _swift_interface_compilation_impl(ctx: AnalysisContext) -> [Promise, list[Pr clang_deps_tset = get_compiled_sdk_clang_deps_tset(ctx, sdk_deps_providers) swift_deps_tset = get_compiled_sdk_swift_deps_tset(ctx, sdk_deps_providers) - swift_module_map_artifact = write_swift_module_map(ctx, uncompiled_module_info_name, swift_deps_tset) + swift_module_map_artifact = write_swift_module_map_with_deps(ctx, uncompiled_module_info_name, swift_deps_tset) cmd.add([ "-explicit-swift-module-map-file", swift_module_map_artifact, ]) - cmd.add(clang_deps_tset.project_as_args("clang_deps")) + cmd.add(clang_deps_tset.project_as_args("clang_module_file_flags")) swiftmodule_output = 
ctx.actions.declare_output(uncompiled_module_info_name + SWIFTMODULE_EXTENSION) expanded_swiftinterface_cmd = expand_relative_prefixed_sdk_path( @@ -80,6 +80,7 @@ def _swift_interface_compilation_impl(ctx: AnalysisContext) -> [Promise, list[Pr compiled_sdk = SwiftCompiledModuleInfo( is_framework = uncompiled_sdk_module_info.is_framework, + is_sdk_module = True, is_swiftmodule = True, module_name = uncompiled_module_info_name, output_artifact = swiftmodule_output, @@ -98,6 +99,7 @@ def _swift_interface_compilation_impl(ctx: AnalysisContext) -> [Promise, list[Pr module_info = ctx.attrs.dep[SdkUncompiledModuleInfo] clang_module_deps = get_swift_sdk_pcm_anon_targets( ctx, + False, module_info.deps, ["-target", module_info.target], ) diff --git a/prelude/apple/swift/swift_toolchain.bzl b/prelude/apple/swift/swift_toolchain.bzl index 7e89e77436d..36ab429d11f 100644 --- a/prelude/apple/swift/swift_toolchain.bzl +++ b/prelude/apple/swift/swift_toolchain.bzl @@ -29,12 +29,13 @@ def traverse_sdk_modules_graph( elif not uncompiled_sdk_module_info.is_swiftmodule and uncompiled_sdk_module_info.module_name in clang_sdk_module_name_to_deps: return - for uncompiled_dep in uncompiled_sdk_module_info.deps: + for uncompiled_dep in uncompiled_sdk_module_info.deps + uncompiled_sdk_module_info.cxx_deps: traverse_sdk_modules_graph(swift_sdk_module_name_to_deps, clang_sdk_module_name_to_deps, uncompiled_dep) - if uncompiled_sdk_module_info.is_swiftmodule: - swift_sdk_module_name_to_deps[uncompiled_sdk_module_info.module_name] = sdk_module_dep - else: - clang_sdk_module_name_to_deps[uncompiled_sdk_module_info.module_name] = sdk_module_dep + + if uncompiled_sdk_module_info.is_swiftmodule: + swift_sdk_module_name_to_deps[uncompiled_sdk_module_info.module_name] = sdk_module_dep + else: + clang_sdk_module_name_to_deps[uncompiled_sdk_module_info.module_name] = sdk_module_dep def swift_toolchain_impl(ctx): # All Clang's PCMs need to be compiled with cxx flags of the target that imports them, @@ -55,26 +56,23 @@ def swift_toolchain_impl(ctx): SwiftToolchainInfo( architecture = ctx.attrs.architecture, can_toolchain_emit_obj_c_header_textually = ctx.attrs.can_toolchain_emit_obj_c_header_textually, - # TODO(T99038725): until we add -debug-compilation-dir we need to wrap - # the Swift invocations so that we can apply a debug prefix map for - # the current directory while maintaining cache hit. 
- uncompiled_swift_sdk_modules_deps = uncompiled_swift_sdk_modules_deps, - uncompiled_clang_sdk_modules_deps = uncompiled_clang_sdk_modules_deps, compiler = cmd_args(ctx.attrs._swiftc_wrapper[RunInfo]).add(ctx.attrs.swiftc[RunInfo]), compiler_flags = ctx.attrs.swiftc_flags, - prefix_serialized_debugging_options = ctx.attrs.prefix_serialized_debug_info, + mk_swift_comp_db = ctx.attrs.make_swift_comp_db, + mk_swift_interface = cmd_args(ctx.attrs._swiftc_wrapper[RunInfo]).add(ctx.attrs.make_swift_interface[RunInfo]), object_format = SwiftObjectFormat(ctx.attrs.object_format) if ctx.attrs.object_format else SwiftObjectFormat("object"), + prefix_serialized_debugging_options = ctx.attrs.prefix_serialized_debug_info, resource_dir = ctx.attrs.resource_dir, runtime_run_paths = ctx.attrs.runtime_run_paths, sdk_path = ctx.attrs._internal_sdk_path or ctx.attrs.sdk_path, - swift_stdlib_tool = ctx.attrs.swift_stdlib_tool[RunInfo], - swift_stdlib_tool_flags = ctx.attrs.swift_stdlib_tool_flags, - swift_ide_test_tool = ctx.attrs.swift_ide_test_tool[RunInfo] if ctx.attrs.swift_ide_test_tool else None, - mk_swift_interface = cmd_args(ctx.attrs._swiftc_wrapper[RunInfo]).add(ctx.attrs.make_swift_interface[RunInfo]), + supports_cxx_interop_requirement_at_import = ctx.attrs.supports_cxx_interop_requirement_at_import, supports_relative_resource_dir = ctx.attrs.supports_relative_resource_dir, supports_swift_cxx_interoperability_mode = ctx.attrs.supports_swift_cxx_interoperability_mode, supports_swift_importing_objc_forward_declarations = ctx.attrs.supports_swift_importing_obj_c_forward_declarations, - supports_cxx_interop_requirement_at_import = ctx.attrs.supports_cxx_interop_requirement_at_import, - mk_swift_comp_db = ctx.attrs.make_swift_comp_db, + swift_ide_test_tool = ctx.attrs.swift_ide_test_tool[RunInfo] if ctx.attrs.swift_ide_test_tool else None, + swift_stdlib_tool = ctx.attrs.swift_stdlib_tool[RunInfo], + swift_stdlib_tool_flags = ctx.attrs.swift_stdlib_tool_flags, + uncompiled_clang_sdk_modules_deps = uncompiled_clang_sdk_modules_deps, + uncompiled_swift_sdk_modules_deps = uncompiled_swift_sdk_modules_deps, ), ] diff --git a/prelude/apple/swift/swift_toolchain_types.bzl b/prelude/apple/swift/swift_toolchain_types.bzl index 10e6941c3d0..7e2737a78a1 100644 --- a/prelude/apple/swift/swift_toolchain_types.bzl +++ b/prelude/apple/swift/swift_toolchain_types.bzl @@ -17,34 +17,34 @@ SwiftObjectFormat = enum( ) SwiftToolchainInfo = provider( - # @unsorted-dict-items fields = { "architecture": provider_field(typing.Any, default = None), "can_toolchain_emit_obj_c_header_textually": provider_field(typing.Any, default = None), # bool - "uncompiled_swift_sdk_modules_deps": provider_field(typing.Any, default = None), # {str: dependency} Expose deps of uncompiled Swift SDK modules. - "uncompiled_clang_sdk_modules_deps": provider_field(typing.Any, default = None), # {str: dependency} Expose deps of uncompiled Clang SDK modules. 
- "compiler_flags": provider_field(typing.Any, default = None), "compiler": provider_field(typing.Any, default = None), - "prefix_serialized_debugging_options": provider_field(typing.Any, default = None), # bool + "compiler_flags": provider_field(typing.Any, default = None), + "mk_swift_comp_db": provider_field(typing.Any, default = None), + "mk_swift_interface": provider_field(typing.Any, default = None), "object_format": provider_field(typing.Any, default = None), # "SwiftObjectFormat" + "prefix_serialized_debugging_options": provider_field(typing.Any, default = None), # bool "resource_dir": provider_field(typing.Any, default = None), # "artifact", - "sdk_path": provider_field(typing.Any, default = None), - "swift_stdlib_tool_flags": provider_field(typing.Any, default = None), - "swift_stdlib_tool": provider_field(typing.Any, default = None), - "swift_ide_test_tool": provider_field(typing.Any, default = None), - "mk_swift_interface": provider_field(typing.Any, default = None), "runtime_run_paths": provider_field(typing.Any, default = None), # [str] + "sdk_path": provider_field(typing.Any, default = None), + "supports_cxx_interop_requirement_at_import": provider_field(typing.Any, default = None), # bool "supports_relative_resource_dir": provider_field(typing.Any, default = None), # bool "supports_swift_cxx_interoperability_mode": provider_field(typing.Any, default = None), # bool "supports_swift_importing_objc_forward_declarations": provider_field(typing.Any, default = None), # bool - "supports_cxx_interop_requirement_at_import": provider_field(typing.Any, default = None), # bool - "mk_swift_comp_db": provider_field(typing.Any, default = None), + "swift_ide_test_tool": provider_field(typing.Any, default = None), + "swift_stdlib_tool": provider_field(typing.Any, default = None), + "swift_stdlib_tool_flags": provider_field(typing.Any, default = None), + "uncompiled_clang_sdk_modules_deps": provider_field(typing.Any, default = None), # {str: dependency} Expose deps of uncompiled Clang SDK modules. + "uncompiled_swift_sdk_modules_deps": provider_field(typing.Any, default = None), # {str: dependency} Expose deps of uncompiled Swift SDK modules. }, ) # A provider that represents a non-yet-compiled SDK (Swift or Clang) module, # and doesn't contain any artifacts because Swift toolchain isn't resolved yet. SdkUncompiledModuleInfo = provider(fields = { + "cxx_deps": provider_field(typing.Any, default = None), # [Dependency] "deps": provider_field(typing.Any, default = None), # [Dependency] "input_relative_path": provider_field(typing.Any, default = None), # A relative prefixed path to a textual swiftinterface/modulemap file within an SDK. "is_framework": provider_field(typing.Any, default = None), # This is mostly needed for the generated Swift module map file. @@ -66,8 +66,11 @@ SdkSwiftOverlayInfo = provider(fields = { }) SwiftCompiledModuleInfo = provider(fields = { - "clang_importer_args": provider_field(typing.Any, default = None), # cmd_args of include flags for the clang importer. + "clang_importer_args": provider_field(typing.Any, default = None), # cmd_args of additional flags for the clang importer. + "clang_module_file_args": provider_field(typing.Any, default = None), # cmd_args of include flags for the clang importer. + "clang_modulemap": provider_field(typing.Any, default = None), # Clang modulemap file which is required for generation of swift_module_map. 
"is_framework": provider_field(typing.Any, default = None), + "is_sdk_module": provider_field(bool, default = False), "is_swiftmodule": provider_field(typing.Any, default = None), # If True then contains a compiled swiftmodule, otherwise Clang's pcm. "module_name": provider_field(typing.Any, default = None), # A real name of a module, without distinguishing suffixes. "output_artifact": provider_field(typing.Any, default = None), # Compiled artifact either swiftmodule or pcm. @@ -75,24 +78,43 @@ SwiftCompiledModuleInfo = provider(fields = { def _add_swiftmodule_search_path(module_info: SwiftCompiledModuleInfo): # We need to import the containing folder, not the file itself. - return ["-I", cmd_args(module_info.output_artifact).parent()] if module_info.is_swiftmodule else [] + # We skip SDK modules as those are found via the -sdk flag. + if module_info.is_swiftmodule and not module_info.is_sdk_module: + return ["-I", cmd_args(module_info.output_artifact, parent = 1)] -def _add_clang_import_flags(module_info: SwiftCompiledModuleInfo): + return [] + +def _add_clang_module_file_flags(module_info: SwiftCompiledModuleInfo): if module_info.is_swiftmodule: return [] else: - return [module_info.clang_importer_args] + return [module_info.clang_module_file_args] + +def _add_clang_importer_flags(module_info: SwiftCompiledModuleInfo): + if module_info.is_swiftmodule: + return [] + else: + return [module_info.clang_importer_args] if module_info.clang_importer_args else [] def _swift_module_map_struct(module_info: SwiftCompiledModuleInfo): - return struct( - isFramework = module_info.is_framework, - moduleName = module_info.module_name, - modulePath = module_info.output_artifact, - ) + if module_info.is_swiftmodule: + return struct( + isFramework = module_info.is_framework, + moduleName = module_info.module_name, + modulePath = module_info.output_artifact, + ) + else: + return struct( + isFramework = module_info.is_framework, + moduleName = module_info.module_name, + clangModulePath = module_info.output_artifact, + clangModuleMapPath = cmd_args([module_info.clang_modulemap], delimiter = ""), + ) SwiftCompiledModuleTset = transitive_set( args_projections = { - "clang_deps": _add_clang_import_flags, + "clang_importer_flags": _add_clang_importer_flags, # Additional clang flags required for compilation. + "clang_module_file_flags": _add_clang_module_file_flags, # Projects pcm modules as cli flags. 
"module_search_path": _add_swiftmodule_search_path, }, json_projections = { diff --git a/prelude/apple/tools/BUCK.v2 b/prelude/apple/tools/BUCK.v2 index b31f120b5b3..19dd72957e8 100644 --- a/prelude/apple/tools/BUCK.v2 +++ b/prelude/apple/tools/BUCK.v2 @@ -1,16 +1,35 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + apple_tools( name = "apple-tools", - assemble_bundle = "prelude//apple/tools/bundling:assemble_bundle", + adhoc_codesign_tool = None, # @oss-enable # @oss-disable: adhoc_codesign_tool = "prelude//apple/tools/meta_only/codesign_rust:adhoc-signer", - adhoc_codesign_tool = None # @oss-enable - split_arch_combine_dsym_bundles_tool = ":split_arch_combine_dsym_bundles_tool", + assemble_bundle = "prelude//apple/tools/bundling:assemble_bundle", dry_codesign_tool = ":dry_codesign_tool", + framework_sanitizer = ":framework_sanitizer", info_plist_processor = "prelude//apple/tools/info_plist_processor:tool", ipa_package_maker = ":ipa_package_maker", make_modulemap = ":make_modulemap", make_vfsoverlay = ":make_vfsoverlay", selective_debugging_scrubber = "prelude//apple/tools/selective_debugging:tool", - swift_objc_header_postprocess = ":swift_objc_header_postprocess", + split_arch_combine_dsym_bundles_tool = ":split_arch_combine_dsym_bundles_tool", + visibility = ["PUBLIC"], + xcframework_maker = ":xcframework_maker", +) + +python_binary( + name = "framework_sanitizer", + main = "framework_sanitizer.py", + visibility = ["PUBLIC"], +) + +python_binary( + name = "xcframework_maker", + main = "xcframework_maker.py", visibility = ["PUBLIC"], ) @@ -65,19 +84,8 @@ python_binary( visibility = ["PUBLIC"], ) -command_alias( - name = "swift_exec", - exe = ":swift_exec.sh", - visibility = ["PUBLIC"], -) - -export_file( - name = "swift_exec.sh", - src = "swift_exec.sh", -) - python_bootstrap_binary( - name = "swift_objc_header_postprocess", - main = "swift_objc_header_postprocess.py", + name = "swift_exec", + main = "swift_exec.py", visibility = ["PUBLIC"], ) diff --git a/prelude/apple/tools/bundling/BUCK.v2 b/prelude/apple/tools/bundling/BUCK.v2 index adcd80c1373..1352fa2736a 100644 --- a/prelude/apple/tools/bundling/BUCK.v2 +++ b/prelude/apple/tools/bundling/BUCK.v2 @@ -1,5 +1,10 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") load("@prelude//apple/tools/defs.bzl", "meta_python_test") +oncall("build_infra") + +source_listing() + python_binary( name = "assemble_bundle", main = "main.py", diff --git a/prelude/apple/tools/bundling/assemble_bundle.py b/prelude/apple/tools/bundling/assemble_bundle.py index 6819eb1b5a8..13ae19a4ea1 100644 --- a/prelude/apple/tools/bundling/assemble_bundle.py +++ b/prelude/apple/tools/bundling/assemble_bundle.py @@ -7,6 +7,7 @@ # pyre-strict +import filecmp import logging import os import shutil @@ -28,6 +29,7 @@ def assemble_bundle( bundle_path: Path, incremental_context: Optional[IncrementalContext], check_conflicts: bool, + versioned_if_macos: bool, ) -> Optional[List[IncrementalStateItem]]: incremental_result = None if incremental_context: @@ -38,14 +40,19 @@ def assemble_bundle( incremental_context.metadata, cast(IncrementalState, incremental_context.state), check_conflicts, + versioned_if_macos, ) else: - _assemble_non_incrementally(bundle_path, spec, check_conflicts) + _assemble_non_incrementally( + bundle_path, spec, check_conflicts, versioned_if_macos + ) incremental_result = calculate_incremental_state( spec, incremental_context.metadata ) else: - 
_assemble_non_incrementally(bundle_path, spec, check_conflicts) + _assemble_non_incrementally( + bundle_path, spec, check_conflicts, versioned_if_macos + ) # External tooling (e.g., Xcode) might depend on the timestamp of the bundle bundle_path.touch() @@ -60,9 +67,12 @@ def _cleanup_output(incremental: bool, path: Path) -> None: def _assemble_non_incrementally( - bundle_path: Path, spec: List[BundleSpecItem], check_conflicts: bool + bundle_path: Path, + spec: List[BundleSpecItem], + check_conflicts: bool, + versioned_if_macos: bool, ) -> None: - logging.getLogger(__name__).info("Assembling bundle non-incrementally.") + _LOGGER.info("Assembling bundle non-incrementally.") _cleanup_output(incremental=False, path=bundle_path) copied_contents: Dict[Path, str] = {} @@ -70,6 +80,12 @@ def _copy(src: str, dst: Path, **kwargs: Any) -> None: if check_conflicts: if dst in copied_contents: + if filecmp.cmp(src, str(dst), shallow=False): + _LOGGER.info( + f"Found a conflict for destination `{os.path.relpath(dst, bundle_path)}` but the files are identical. Treating it as a non-conflict, since this can legitimately happen for universal builds." + ) + return + raise RuntimeError( f"Found a conflict for destination `{os.path.relpath(dst, bundle_path)}`: `{src}` conflicts with `{copied_contents[dst]}`" ) @@ -77,11 +93,21 @@ def _copy(src: str, dst: Path, **kwargs: Any) -> None: if check_conflicts: copied_contents[dst] = src + symlinks = set() + for spec_item in spec: source_path = spec_item.src destination_path = bundle_path / spec_item.dst destination_path.parent.mkdir(parents=True, exist_ok=True) + if spec_item.dst.startswith("Versions/A") and versioned_if_macos: + parts = Path(spec_item.dst).parts + if len(parts) <= 2: + raise RuntimeError( + "Versioned bundles cannot be created from a single copy directly to Versions/A" + ) + symlinks.add(parts[2]) + if os.path.isdir(source_path): shutil.copytree( source_path, @@ -93,6 +119,16 @@ def _copy(src: str, dst: Path, **kwargs: Any) -> None: else: _copy(source_path, destination_path) + _create_symlinks(symlinks, bundle_path) + + +def _create_symlinks(symlinks: set[str], bundle_path: Path) -> None: + if symlinks and not Path.exists(bundle_path / "Versions/Current"): + os.symlink("A", bundle_path / "Versions/Current") + for dir_to_link in symlinks: + if not Path.exists(bundle_path / dir_to_link): + os.symlink("Versions/Current/" + dir_to_link, bundle_path / dir_to_link) + def _assemble_incrementally( bundle_path: Path, @@ -100,8 +136,9 @@ def _assemble_incrementally( action_metadata: Dict[Path, str], incremental_state: IncrementalState, check_conflicts: bool, + versioned_if_macos: bool, ) -> List[IncrementalStateItem]: - logging.getLogger(__name__).info("Assembling bundle incrementally.") + _LOGGER.info("Assembling bundle incrementally.") _cleanup_output(incremental=True, path=bundle_path) _delete_swift_stdlib_files(bundle_path, incremental_state.swift_stdlib_paths) paths_to_delete = { @@ -122,8 +159,12 @@ def _assemble_incrementally( if check_conflicts: _check_path_conflicts(new_incremental_state) - else: - new_incremental_state = _filter_conflicting_paths(new_incremental_state) + + # Filtering still needs to run even when check_conflicts is set, to remove conflicts between identical files + new_incremental_state = _filter_conflicting_paths(new_incremental_state) + + new_symlinks = set() + versioned_subdir = Path("Versions/A") for item in new_incremental_state: # Added file might not be present in old result, need to check first.
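A minimal runnable sketch (not part of the patch) of the layout the new _create_symlinks helper produces for a versioned macOS bundle; the Demo.framework name and the single Versions/A/Resources destination are hypothetical:

    import os
    from pathlib import Path

    bundle_path = Path("Demo.framework")  # hypothetical bundle root
    # A spec item copied to "Versions/A/Resources/Info.plist" records the
    # first path component under Versions/A ("Resources") for symlinking.
    (bundle_path / "Versions/A/Resources").mkdir(parents=True)
    (bundle_path / "Versions/A/Resources/Info.plist").touch()
    symlinks = {"Resources"}

    # Mirrors _create_symlinks: one shared Versions/Current -> A link,
    # then one top-level link per recorded directory name.
    if symlinks and not (bundle_path / "Versions/Current").exists():
        os.symlink("A", bundle_path / "Versions/Current")
    for dir_to_link in symlinks:
        if not (bundle_path / dir_to_link).exists():
            os.symlink("Versions/Current/" + dir_to_link, bundle_path / dir_to_link)

    # Resulting layout:
    #   Demo.framework/Versions/A/Resources/Info.plist
    #   Demo.framework/Versions/Current -> A
    #   Demo.framework/Resources -> Versions/Current/Resources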
@@ -150,6 +191,12 @@ def _assemble_incrementally( ) project_relative_dst.parent.mkdir(parents=True, exist_ok=True) shutil.copy2(item.source, project_relative_dst, follow_symlinks=False) + if Path(dst).is_relative_to(versioned_subdir): + symlink = Path(dst).relative_to(versioned_subdir).parts[0] + new_symlinks.add(symlink) + + if versioned_if_macos: + _create_symlinks(new_symlinks, bundle_path) for path in paths_to_delete: (bundle_path / path).unlink() @@ -162,14 +209,22 @@ def _assemble_incrementally( def _check_path_conflicts(incremental_state: List[IncrementalStateItem]) -> None: + """ + Throws an exception if multiple items share the same destination path but their contents differ. + """ checked = {} for item in incremental_state: dst = item.destination_relative_to_bundle if dst in checked: - raise RuntimeError( - f"Found a conflict for destination `{dst}`: `{item.source}` conflicts with `{checked[dst]}`" - ) - checked[dst] = item.source + if item.digest != checked[dst].digest: + raise RuntimeError( + f"Found a conflict for destination `{dst}`: `{item.source}` conflicts with `{checked[dst].source}`" + ) + else: + _LOGGER.info( + f"Found a conflict for destination `{dst}` but the files are identical. Treating it as a non-conflict, since this can legitimately happen for universal builds." + ) + checked[dst] = item def _filter_conflicting_paths( @@ -214,5 +269,8 @@ def _cleanup_empty_redundant_directories( new_directories = { p for item in new_state for p in item.destination_relative_to_bundle.parents } + versioned_subdir = Path("Versions/A") for redundant_directory in old_directories - new_directories: shutil.rmtree(bundle_path / redundant_directory, ignore_errors=True) + if redundant_directory.parent == versioned_subdir: + Path.unlink(bundle_path / redundant_directory.name) diff --git a/prelude/apple/tools/bundling/assemble_bundle_types.py b/prelude/apple/tools/bundling/assemble_bundle_types.py index 88233489fdf..563b3bef976 100644 --- a/prelude/apple/tools/bundling/assemble_bundle_types.py +++ b/prelude/apple/tools/bundling/assemble_bundle_types.py @@ -12,7 +12,7 @@ import functools from dataclasses import dataclass from pathlib import Path -from typing import Dict, Optional +from typing import Dict, List, Optional from apple.tools.code_signing.codesign_bundle import CodesignConfiguration @@ -26,6 +26,8 @@ class BundleSpecItem: # Should be bundle relative path, empty string means the root of the bundle dst: str codesign_on_copy: bool = False + codesign_entitlements: Optional[str] = None + codesign_flags_override: Optional[List[str]] = None def __eq__(self: BundleSpecItem, other: Optional[BundleSpecItem]) -> bool: return ( @@ -33,19 +35,55 @@ def __eq__(self: BundleSpecItem, other: Optional[BundleSpecItem]) -> bool: and self.src == other.src and self.dst == other.dst and self.codesign_on_copy == other.codesign_on_copy + and self.codesign_entitlements == other.codesign_entitlements + and self.codesign_flags_override == other.codesign_flags_override ) def __ne__(self: BundleSpecItem, other: BundleSpecItem) -> bool: return not self.__eq__(other) def __hash__(self: BundleSpecItem) -> int: - return hash((self.src, self.dst, self.codesign_on_copy)) + return hash( + ( + self.src, + self.dst, + self.codesign_on_copy, + self.codesign_entitlements, + ( + tuple(self.codesign_flags_override) + if self.codesign_flags_override is not None + else hash(None) + ), + ) + ) def __lt__(self: BundleSpecItem, other: BundleSpecItem) -> bool: return ( self.src < other.src or self.dst < other.dst or
self.codesign_on_copy < other.codesign_on_copy + or ( + self.codesign_entitlements < other.codesign_entitlements + if ( + self.codesign_entitlements is not None + and other.codesign_entitlements is not None + ) + else ( + self.codesign_entitlements is None + and other.codesign_entitlements is not None + ) + ) + or ( + self.codesign_flags_override < other.codesign_flags_override + if ( + self.codesign_flags_override is not None + and other.codesign_flags_override is not None + ) + else ( + self.codesign_flags_override is None + and other.codesign_flags_override is not None + ) + ) ) @@ -62,3 +100,5 @@ class IncrementalContext: codesigned: bool codesign_configuration: Optional[CodesignConfiguration] codesign_identity: Optional[str] + codesign_arguments: List[str] + versioned_if_macos: bool diff --git a/prelude/apple/tools/bundling/incremental_state.py b/prelude/apple/tools/bundling/incremental_state.py index 4588e8adc3a..49bb78f8acb 100644 --- a/prelude/apple/tools/bundling/incremental_state.py +++ b/prelude/apple/tools/bundling/incremental_state.py @@ -7,6 +7,8 @@ # pyre-strict +from __future__ import annotations + import json from dataclasses import dataclass from io import TextIOBase @@ -15,7 +17,7 @@ from apple.tools.code_signing.codesign_bundle import CodesignConfiguration -_VERSION = 3 +_VERSION = 7 @dataclass @@ -35,6 +37,35 @@ class IncrementalStateItem: """ +@dataclass +class CodesignedOnCopy: + path: Path + """ + Path relative to bundle root which needs to be codesigned + """ + entitlements_digest: Optional[str] + """ + Digest of entitlements used when the given path is codesigned on copy + """ + codesign_flags_override: Optional[List[str]] + """ + If present, overrides codesign arguments (which are used for root bundle) when the given path is codesigned on copy + """ + + def __hash__(self: CodesignedOnCopy) -> int: + return hash( + ( + self.path, + self.entitlements_digest, + ( + tuple(self.codesign_flags_override) + if self.codesign_flags_override is not None + else hash(None) + ), + ) + ) + + @dataclass class IncrementalState: """ @@ -44,9 +75,11 @@ class IncrementalState: items: List[IncrementalStateItem] codesigned: bool codesign_configuration: CodesignConfiguration - codesign_on_copy_paths: List[Path] + codesigned_on_copy: List[CodesignedOnCopy] codesign_identity: Optional[str] + codesign_arguments: List[str] swift_stdlib_paths: List[Path] + versioned_if_macos: bool version: int = _VERSION @@ -59,10 +92,12 @@ def default(self, o: object) -> object: "codesign_configuration": ( o.codesign_configuration.value if o.codesign_configuration else None ), - "codesign_on_copy_paths": [str(p) for p in o.codesign_on_copy_paths], + "codesigned_on_copy": [self.default(i) for i in o.codesigned_on_copy], "codesign_identity": o.codesign_identity, "swift_stdlib_paths": [str(p) for p in o.swift_stdlib_paths], "version": o.version, + "codesign_arguments": o.codesign_arguments, + "versioned_if_macos": o.versioned_if_macos, } elif isinstance(o, IncrementalStateItem): result = { @@ -74,15 +109,22 @@ def default(self, o: object) -> object: if o.resolved_symlink is not None: result["resolved_symlink"] = str(o.resolved_symlink) return result + elif isinstance(o, CodesignedOnCopy): + result = {} + result["path"] = str(o.path) + if o.entitlements_digest is not None: + result["entitlements_digest"] = str(o.entitlements_digest) + if o.codesign_flags_override is not None: + result["codesign_flags_override"] = o.codesign_flags_override + return result else: return super().default(o) -def _object_hook(dict: 
Dict[str, Any]) -> Union[IncrementalState, IncrementalStateItem]: +def _object_hook( + dict: Dict[str, Any] +) -> Union[IncrementalState, IncrementalStateItem, CodesignedOnCopy]: if "version" in dict: - dict["codesign_on_copy_paths"] = [ - Path(p) for p in dict.pop("codesign_on_copy_paths") - ] codesign_configuration = dict.pop("codesign_configuration") dict["codesign_configuration"] = ( CodesignConfiguration(codesign_configuration) @@ -91,7 +133,7 @@ def _object_hook(dict: Dict[str, Any]) -> Union[IncrementalState, IncrementalSta ) dict["swift_stdlib_paths"] = [Path(p) for p in dict.pop("swift_stdlib_paths")] return IncrementalState(**dict) - else: + elif "destination_relative_to_bundle" in dict: dict["source"] = Path(dict.pop("source")) dict["destination_relative_to_bundle"] = Path( dict.pop("destination_relative_to_bundle") @@ -100,6 +142,11 @@ def _object_hook(dict: Dict[str, Any]) -> Union[IncrementalState, IncrementalSta resolved_symlink = dict.pop("resolved_symlink", None) dict["resolved_symlink"] = Path(resolved_symlink) if resolved_symlink else None return IncrementalStateItem(**dict) + else: + dict["path"] = Path(dict.pop("path")) + dict["entitlements_digest"] = dict.pop("entitlements_digest", None) + dict["codesign_flags_override"] = dict.pop("codesign_flags_override", None) + return CodesignedOnCopy(**dict) def parse_incremental_state(data: TextIOBase) -> IncrementalState: diff --git a/prelude/apple/tools/bundling/incremental_state_test.py b/prelude/apple/tools/bundling/incremental_state_test.py index 3f55977505a..b893e1cd20a 100644 --- a/prelude/apple/tools/bundling/incremental_state_test.py +++ b/prelude/apple/tools/bundling/incremental_state_test.py @@ -5,20 +5,72 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
+import io +import json import unittest -from json import JSONDecodeError from pathlib import Path import pkg_resources from .incremental_state import ( + CodesignedOnCopy, IncrementalState, IncrementalStateItem, + IncrementalStateJSONEncoder, parse_incremental_state, ) class TestIncrementalState(unittest.TestCase): + def test_state_serialization_and_deserialization(self): + expected = IncrementalState( + items=[ + IncrementalStateItem( + source=Path("repo/foo.txt"), + destination_relative_to_bundle=Path("foo.txt"), + digest="foo_digest", + resolved_symlink=None, + ), + IncrementalStateItem( + source=Path("buck-out/bar.txt"), + destination_relative_to_bundle=Path("Resources/bar.txt"), + digest="bar_digest", + resolved_symlink=None, + ), + ], + codesigned=True, + codesign_configuration=None, + codesigned_on_copy=[ + CodesignedOnCopy( + path=Path("Resources/bar.txt"), + entitlements_digest=None, + codesign_flags_override=None, + ), + CodesignedOnCopy( + path=Path("Resources/baz.txt"), + entitlements_digest="abc", + codesign_flags_override=None, + ), + CodesignedOnCopy( + path=Path("Resources/qux.txt"), + entitlements_digest=None, + codesign_flags_override=["--deep", "--force"], + ), + ], + codesign_identity="Johnny Appleseed", + codesign_arguments=[ + "--force", + ], + swift_stdlib_paths=[Path("Frameworks/libswiftCore.dylib")], + versioned_if_macos=False, + ) + json_result = json.dumps(expected, cls=IncrementalStateJSONEncoder) + result = parse_incremental_state(io.StringIO(json_result)) + self.assertEqual( + result, + expected, + ) + def test_valid_state_is_parsed_successfully(self): file_content = pkg_resources.resource_stream( __name__, "test_resources/valid_incremental_state.json" @@ -47,9 +99,30 @@ def test_valid_state_is_parsed_successfully(self): ], codesigned=True, codesign_configuration=None, - codesign_on_copy_paths=[Path("Resources/bar.txt")], + codesigned_on_copy=[ + CodesignedOnCopy( + path=Path("Resources/bar.txt"), + entitlements_digest=None, + codesign_flags_override=None, + ), + CodesignedOnCopy( + path=Path("Resources/baz.txt"), + entitlements_digest="abc", + codesign_flags_override=None, + ), + CodesignedOnCopy( + path=Path("Resources/qux.txt"), + entitlements_digest=None, + codesign_flags_override=["--deep", "--force"], + ), + ], codesign_identity="Johny Appleseed", + codesign_arguments=[ + "--force", + "--deep", + ], swift_stdlib_paths=[Path("Frameworks/libswiftCore.dylib")], + versioned_if_macos=True, ) self.assertEqual( result, @@ -60,7 +133,7 @@ def test_error_when_invalid_metadata(self): file_content = pkg_resources.resource_stream( __name__, "test_resources/the.broken_json" ) - with self.assertRaises(JSONDecodeError): + with self.assertRaises(json.JSONDecodeError): _ = parse_incremental_state(file_content) def test_user_friendly_error_when_metadata_with_newer_version(self): diff --git a/prelude/apple/tools/bundling/incremental_utils.py b/prelude/apple/tools/bundling/incremental_utils.py index 38f7251e1b5..03e0f9a0c45 100644 --- a/prelude/apple/tools/bundling/incremental_utils.py +++ b/prelude/apple/tools/bundling/incremental_utils.py @@ -10,10 +10,10 @@ import logging import os from pathlib import Path -from typing import Dict, List, Set, Tuple +from typing import Dict, List, Optional, Set, Tuple from .assemble_bundle_types import BundleSpecItem, IncrementalContext -from .incremental_state import IncrementalStateItem +from .incremental_state import CodesignedOnCopy, IncrementalStateItem FILES_TO_BE_IGNORED: Set[str] = { # Storage of Finder settings, which shouldn't 
be added when enumerating files from sources @@ -30,6 +30,12 @@ def should_assemble_incrementally( "Decided not to assemble incrementally — no incremental state for previous build." ) return False + if previous_run_state.versioned_if_macos != incremental_context.versioned_if_macos: + logging.getLogger(__name__).info( + "Decided not to assemble incrementally — current build and previous build have different versioned_if_macos settings." + ) + return False + previously_codesigned = previous_run_state.codesigned # If previously bundle was not code signed there should be no problems with code signing # currently in incremental mode. Existing binaries could be code signed "on @@ -52,6 +58,12 @@ "Decided not to assemble incrementally — previous vs current builds have mismatching codesigning identities." ) return False + # If the previous codesign arguments differ from the current ones, also perform a non-incremental run. + if previous_run_state.codesign_arguments != incremental_context.codesign_arguments: + logging.getLogger(__name__).info( + "Decided not to assemble incrementally — previous vs current builds have mismatching codesigning arguments." + ) + return False # If bundle from previous run was signed in a different configuration vs the current run (e.g. dry code signed while now regular code signing is required) perform non-incremental run. if ( previous_run_state.codesign_configuration @@ -64,29 +76,45 @@ # If there is an artifact that was code signed on copy in previous run which is # present in current run and not code signed on copy, we should perform # non-incremental run for simplicity and correctness reasons. - current_codesigned_on_copy_paths = {Path(i.dst) for i in spec if i.codesign_on_copy} + current_codesigned_on_copy_items = { + codesigned_on_copy_item( + path=Path(i.dst), + entitlements=( + Path(i.codesign_entitlements) if i.codesign_entitlements else None + ), + incremental_context=incremental_context, + codesign_flags_override=i.codesign_flags_override, + ) + for i in spec + if i.codesign_on_copy + } + codesigned_on_copy_paths_from_previous_build_which_are_present_in_current_build = _codesigned_on_copy_paths_from_previous_build_which_are_present_in_current_build( - set(previous_run_state.codesign_on_copy_paths), + previous_run_state.codesigned_on_copy, {Path(i.dst) for i in spec}, ) codesign_on_copy_paths_are_compatible = codesigned_on_copy_paths_from_previous_build_which_are_present_in_current_build.issubset( - current_codesigned_on_copy_paths + current_codesigned_on_copy_items ) if not codesign_on_copy_paths_are_compatible: logging.getLogger(__name__).info( - f"Decided not to assemble incrementally — there is at least one artifact `{list(codesigned_on_copy_paths_from_previous_build_which_are_present_in_current_build - current_codesigned_on_copy_paths)[0]}` that was code signed on copy in previous build which is present in current run and not code signed on copy." + f"Decided not to assemble incrementally — there is at least one artifact `{list(codesigned_on_copy_paths_from_previous_build_which_are_present_in_current_build - current_codesigned_on_copy_items)[0]}` that was code signed on copy in the previous build and is present in the current run but not code signed on copy (or is code signed with a different set of entitlements and flags)."
) return codesign_on_copy_paths_are_compatible def _codesigned_on_copy_paths_from_previous_build_which_are_present_in_current_build( - previously_codesigned_on_copy_paths: Set[Path], + previously_codesigned_on_copy: List[CodesignedOnCopy], all_input_files: Set[Path], -) -> Set[Path]: +) -> Set[CodesignedOnCopy]: all_input_files_and_directories = all_input_files | { i for file in all_input_files for i in file.parents } - return previously_codesigned_on_copy_paths & all_input_files_and_directories + return { + i + for i in previously_codesigned_on_copy + if i.path in all_input_files_and_directories + } def _get_new_digest(action_metadata: Dict[Path, str], path: Path) -> str: @@ -167,3 +195,24 @@ def _list_directory_deterministically(directory: Path) -> List[Path]: # Sort in order for walk to be deterministic. dir_names.sort() return result + + +def codesigned_on_copy_item( + path: Path, + entitlements: Optional[Path], + incremental_context: IncrementalContext, + codesign_flags_override: Optional[List[str]], +) -> CodesignedOnCopy: + if entitlements is not None: + digest = incremental_context.metadata.get(entitlements) + if digest is None: + raise RuntimeError( + f"Expected digest for entitlements file path `{entitlements}` to be present in action metadata." + ) + else: + digest = None + return CodesignedOnCopy( + path=path, + entitlements_digest=digest, + codesign_flags_override=codesign_flags_override, + ) diff --git a/prelude/apple/tools/bundling/incremental_utils_test.py b/prelude/apple/tools/bundling/incremental_utils_test.py index de2f48f284f..47e7b1b9759 100644 --- a/prelude/apple/tools/bundling/incremental_utils_test.py +++ b/prelude/apple/tools/bundling/incremental_utils_test.py @@ -14,7 +14,7 @@ from apple.tools.code_signing.codesign_bundle import CodesignConfiguration from .assemble_bundle_types import BundleSpecItem -from .incremental_state import IncrementalState, IncrementalStateItem +from .incremental_state import CodesignedOnCopy, IncrementalState, IncrementalStateItem from .incremental_utils import ( calculate_incremental_state, IncrementalContext, @@ -53,6 +53,8 @@ def test_not_run_incrementally_when_previous_build_not_incremental(self): codesigned=False, codesign_configuration=None, codesign_identity=None, + codesign_arguments=[], + versioned_if_macos=True, ) self.assertFalse(should_assemble_incrementally(spec, incremental_context)) @@ -77,13 +79,17 @@ def test_run_incrementally_when_previous_build_not_codesigned(self): ], codesigned=False, codesign_configuration=None, - codesign_on_copy_paths=[], + codesigned_on_copy=[], codesign_identity=None, + codesign_arguments=[], + versioned_if_macos=True, swift_stdlib_paths=[], ), codesigned=True, codesign_configuration=None, codesign_identity=None, + codesign_arguments=[], + versioned_if_macos=True, ) self.assertTrue(should_assemble_incrementally(spec, incremental_context)) @@ -110,13 +116,17 @@ def test_not_run_incrementally_when_previous_build_codesigned_and_current_is_not ], codesigned=True, codesign_configuration=None, - codesign_on_copy_paths=[], + codesigned_on_copy=[], codesign_identity=None, + codesign_arguments=[], + versioned_if_macos=True, swift_stdlib_paths=[], ), codesigned=False, codesign_configuration=None, codesign_identity=None, + codesign_arguments=[], + versioned_if_macos=True, ) self.assertFalse(should_assemble_incrementally(spec, incremental_context)) # Check that behavior changes when both builds are codesigned @@ -146,13 +156,17 @@ def 
test_not_run_incrementally_when_previous_build_codesigned_with_different_ide ], codesigned=True, codesign_configuration=None, - codesign_on_copy_paths=[], + codesigned_on_copy=[], codesign_identity="old_identity", + codesign_arguments=[], + versioned_if_macos=True, swift_stdlib_paths=[], ), codesigned=True, codesign_configuration=None, codesign_identity="new_identity", + codesign_arguments=[], + versioned_if_macos=True, ) self.assertFalse(should_assemble_incrementally(spec, incremental_context)) # Check that behavior changes when identities are same @@ -172,9 +186,19 @@ def test_run_incrementally_when_codesign_on_copy_paths_match(self): dst="bar", codesign_on_copy=True, ), + BundleSpecItem( + src="src/baz", + dst="baz", + codesign_on_copy=True, + codesign_entitlements="entitlements.plist", + ), ] incremental_context = IncrementalContext( - metadata={Path("src/foo"): "digest"}, + metadata={ + Path("src/foo"): "digest", + Path("src/baz"): "digest2", + Path("entitlements.plist"): "entitlements_digest", + }, state=IncrementalState( items=[ IncrementalStateItem( @@ -182,17 +206,38 @@ def test_run_incrementally_when_codesign_on_copy_paths_match(self): destination_relative_to_bundle=Path("foo"), digest="digest", resolved_symlink=None, - ) + ), + IncrementalStateItem( + source=Path("src/baz"), + destination_relative_to_bundle=Path("baz"), + digest="digest2", + resolved_symlink=None, + ), ], codesigned=True, codesign_configuration=None, - codesign_on_copy_paths=[Path("foo")], + codesigned_on_copy=[ + CodesignedOnCopy( + path=Path("foo"), + entitlements_digest=None, + codesign_flags_override=None, + ), + CodesignedOnCopy( + path=Path("baz"), + entitlements_digest="entitlements_digest", + codesign_flags_override=None, + ), + ], codesign_identity="same_identity", + codesign_arguments=[], + versioned_if_macos=True, swift_stdlib_paths=[], ), codesigned=True, codesign_configuration=None, codesign_identity="same_identity", + codesign_arguments=[], + versioned_if_macos=True, ) self.assertTrue(should_assemble_incrementally(spec, incremental_context)) @@ -219,15 +264,160 @@ def test_not_run_incrementally_when_codesign_on_copy_paths_mismatch(self): codesigned=True, codesign_configuration=None, # but it was codesigned in old build - codesign_on_copy_paths=[Path("foo")], + codesigned_on_copy=[ + CodesignedOnCopy( + path=Path("foo"), + entitlements_digest=None, + codesign_flags_override=None, + ) + ], + codesign_identity="same_identity", + codesign_arguments=[], + versioned_if_macos=True, + swift_stdlib_paths=[], + ), + codesigned=True, + codesign_configuration=None, + codesign_identity="same_identity", + codesign_arguments=[], + versioned_if_macos=True, + ) + self.assertFalse(should_assemble_incrementally(spec, incremental_context)) + spec[0].codesign_on_copy = True + self.assertTrue(should_assemble_incrementally(spec, incremental_context)) + + def test_not_run_incrementally_when_codesign_on_copy_entitlements_mismatch(self): + spec = [ + BundleSpecItem( + src="src/foo", + dst="foo", + codesign_on_copy=True, + codesign_entitlements="baz/entitlements.plist", + ) + ] + incremental_context = IncrementalContext( + metadata={ + Path("src/foo"): "digest", + Path("baz/entitlements.plist"): "new_digest", + }, + state=IncrementalState( + items=[ + IncrementalStateItem( + source=Path("src/foo"), + destination_relative_to_bundle=Path("foo"), + digest="digest", + resolved_symlink=None, + ) + ], + codesigned=True, + codesign_configuration=None, + codesigned_on_copy=[ + CodesignedOnCopy( + path=Path("foo"), + 
entitlements_digest="old_digest", + codesign_flags_override=None, + ) + ], + codesign_identity="same_identity", + codesign_arguments=[], + versioned_if_macos=True, + swift_stdlib_paths=[], + ), + codesigned=True, + codesign_configuration=None, + codesign_identity="same_identity", + codesign_arguments=[], + versioned_if_macos=True, + ) + self.assertFalse(should_assemble_incrementally(spec, incremental_context)) + incremental_context.metadata[Path("baz/entitlements.plist")] = "old_digest" + self.assertTrue(should_assemble_incrementally(spec, incremental_context)) + + def test_not_run_incrementally_when_codesign_on_copy_flags_mismatch(self): + spec = [ + BundleSpecItem( + src="src/foo", + dst="foo", + codesign_on_copy=True, + codesign_flags_override=["--force"], + ) + ] + incremental_context = IncrementalContext( + metadata={ + Path("src/foo"): "digest", + }, + state=IncrementalState( + items=[ + IncrementalStateItem( + source=Path("src/foo"), + destination_relative_to_bundle=Path("foo"), + digest="digest", + resolved_symlink=None, + ) + ], + codesigned=True, + codesign_configuration=None, + codesigned_on_copy=[ + CodesignedOnCopy( + path=Path("foo"), + entitlements_digest=None, + codesign_flags_override=["--force", "--deep"], + ) + ], + codesign_identity="same_identity", + codesign_arguments=[], + versioned_if_macos=True, + swift_stdlib_paths=[], + ), + codesigned=True, + codesign_configuration=None, + codesign_identity="same_identity", + codesign_arguments=[], + versioned_if_macos=True, + ) + self.assertFalse(should_assemble_incrementally(spec, incremental_context)) + incremental_context.state.codesigned_on_copy[0].codesign_flags_override = [ + "--force" + ] + self.assertTrue(should_assemble_incrementally(spec, incremental_context)) + + def test_not_run_incrementally_when_codesign_arguments_mismatch(self): + spec = [ + BundleSpecItem( + src="src/foo", + dst="foo", + ) + ] + incremental_context = IncrementalContext( + metadata={ + Path("src/foo"): "digest", + }, + state=IncrementalState( + items=[ + IncrementalStateItem( + source=Path("src/foo"), + destination_relative_to_bundle=Path("foo"), + digest="digest", + resolved_symlink=None, + ) + ], + codesigned=True, + codesign_configuration=None, + codesigned_on_copy=[], codesign_identity="same_identity", + codesign_arguments=["--force"], swift_stdlib_paths=[], + versioned_if_macos=True, ), codesigned=True, codesign_configuration=None, codesign_identity="same_identity", + codesign_arguments=["--force", "--deep"], + versioned_if_macos=True, ) self.assertFalse(should_assemble_incrementally(spec, incremental_context)) + incremental_context.codesign_arguments = ["--force"] + self.assertTrue(should_assemble_incrementally(spec, incremental_context)) def test_not_run_incrementally_when_codesign_configurations_mismatch(self): spec = [ @@ -251,13 +441,23 @@ def test_not_run_incrementally_when_codesign_configurations_mismatch(self): codesigned=True, # Dry codesigned in old build codesign_configuration=CodesignConfiguration.dryRun, - codesign_on_copy_paths=[Path("foo")], + codesigned_on_copy=[ + CodesignedOnCopy( + path=Path("foo"), + entitlements_digest=None, + codesign_flags_override=None, + ) + ], codesign_identity="same_identity", + codesign_arguments=[], + versioned_if_macos=True, swift_stdlib_paths=[], ), codesigned=True, codesign_configuration=CodesignConfiguration.dryRun, codesign_identity="same_identity", + codesign_arguments=[], + versioned_if_macos=True, ) # Canary self.assertTrue(should_assemble_incrementally(spec, incremental_context)) diff 
--git a/prelude/apple/tools/bundling/main.py index 851ad0a6d12..11c4a892836 100644 --- a/prelude/apple/tools/bundling/main.py +++ b/prelude/apple/tools/bundling/main.py @@ -22,6 +22,7 @@ AdhocSigningContext, codesign_bundle, CodesignConfiguration, + CodesignedPath, signing_context_with_profile_selection, ) from apple.tools.code_signing.list_codesign_identities import ( @@ -36,11 +37,13 @@ from .assemble_bundle import assemble_bundle from .assemble_bundle_types import BundleSpecItem, IncrementalContext from .incremental_state import ( + CodesignedOnCopy, IncrementalState, IncrementalStateItem, IncrementalStateJSONEncoder, parse_incremental_state, ) +from .incremental_utils import codesigned_on_copy_item from .swift_support import run_swift_stdlib_tool, SwiftSupportArguments @@ -77,6 +80,19 @@ def _args_parser() -> argparse.ArgumentParser: required=False, help="Path to code signing utility. If not provided standard `codesign` tool will be used.", ) + parser.add_argument( + "--strict-provisioning-profile-search", + action="store_true", + required=False, + help="Fail code signing if more than one matching profile is found.", + ) + parser.add_argument( + "--provisioning-profile-filter", + metavar="", + type=str, + required=False, + help="Regex to disambiguate multiple matching profiles, evaluated against the provisioning profile filename.", + ) parser.add_argument( "--codesign-args", type=str, @@ -206,6 +222,13 @@ def _args_parser() -> argparse.ArgumentParser: required=False, help="Required if swift support was requested. Bundle relative destination path to frameworks directory.", ) + parser.add_argument( + "--extensionkit-extensions-destination", + metavar="", + type=Path, + required=False, + help="Required if swift support was requested.
Bundle relative destination path to ExtensionKit Extensions directory.", + ) parser.add_argument( "--plugins-destination", metavar="", @@ -244,6 +267,12 @@ def _args_parser() -> argparse.ArgumentParser: action="store_true", help="Uses experimental faster provisioning profile parsing.", ) + parser.add_argument( + "--versioned-if-macos", + action="store_true", + help="Create symlinks for versioned macOS bundle", + ) + return parser @@ -311,6 +340,8 @@ def _main() -> None: ), log_file_path=args.log_file, should_use_fast_provisioning_profile_parsing=args.fast_provisioning_profile_parsing, + strict_provisioning_profile_search=args.strict_provisioning_profile_search, + provisioning_profile_filter=args.provisioning_profile_filter, ) else: profile_selection_context = None @@ -333,6 +364,8 @@ def _main() -> None: list_codesign_identities=list_codesign_identities, log_file_path=args.log_file, should_use_fast_provisioning_profile_parsing=args.fast_provisioning_profile_parsing, + strict_provisioning_profile_search=args.strict_provisioning_profile_search, + provisioning_profile_filter=args.provisioning_profile_filter, ) selected_identity_argument = ( signing_context.selected_profile_info.identity.fingerprint @@ -350,6 +383,8 @@ def _main() -> None: codesigned=args.codesign, codesign_configuration=args.codesign_configuration, codesign_identity=selected_identity_argument, + codesign_arguments=args.codesign_args, + versioned_if_macos=args.versioned_if_macos, ) incremental_state = assemble_bundle( @@ -357,6 +392,7 @@ def _main() -> None: bundle_path=args.output, incremental_context=incremental_context, check_conflicts=args.check_conflicts, + versioned_if_macos=args.versioned_if_macos, ) swift_support_args = _swift_support_arguments( @@ -381,21 +417,47 @@ def _main() -> None: raise RuntimeError( "Expected signing context to be created before bundling is done if codesign is requested." ) + + bundle_path = CodesignedPath( + path=args.output, entitlements=args.entitlements, flags=args.codesign_args + ) codesign_on_copy_paths = [ - i.dst for i in spec if i.codesign_on_copy - ] + swift_stdlib_paths + CodesignedPath( + path=bundle_path.path / i.dst, + entitlements=( + Path(i.codesign_entitlements) if i.codesign_entitlements else None + ), + flags=( + i.codesign_flags_override + if (i.codesign_flags_override is not None) + else args.codesign_args + ), + ) + for i in spec + if i.codesign_on_copy + ] + [ + CodesignedPath( + path=bundle_path.path / path, + entitlements=None, + flags=args.codesign_args, + ) + for path in swift_stdlib_paths + ] + codesign_bundle( - bundle_path=args.output, + bundle_path=bundle_path, signing_context=signing_context, - entitlements_path=args.entitlements, platform=args.platform, codesign_on_copy_paths=codesign_on_copy_paths, - codesign_args=args.codesign_args, codesign_tool=args.codesign_tool, codesign_configuration=args.codesign_configuration, ) if incremental_state: + if incremental_context is None: + raise RuntimeError( + "Expected incremental context to be present when incremental state is non-null." 
+ ) _write_incremental_state( spec=spec, items=incremental_state, @@ -403,7 +465,10 @@ def _main() -> None: codesigned=args.codesign, codesign_configuration=args.codesign_configuration, selected_codesign_identity=selected_identity_argument, + codesign_arguments=args.codesign_args, swift_stdlib_paths=swift_stdlib_paths, + versioned_if_macos=args.versioned_if_macos, + incremental_context=incremental_context, ) if profiling_enabled: @@ -419,6 +484,8 @@ def _incremental_context( codesigned: bool, codesign_configuration: CodesignConfiguration, codesign_identity: Optional[str], + codesign_arguments: List[str], + versioned_if_macos: bool, ) -> Optional[IncrementalContext]: action_metadata = action_metadata_if_present(_METADATA_PATH_KEY) if action_metadata is None: @@ -437,6 +504,8 @@ def _incremental_context( codesigned=codesigned, codesign_configuration=codesign_configuration, codesign_identity=codesign_identity, + codesign_arguments=codesign_arguments, + versioned_if_macos=versioned_if_macos, ) @@ -478,6 +547,10 @@ def _swift_support_arguments( parser.error( "Expected `--frameworks-destination` argument to be specified when `--swift-stdlib-command` is present." ) + if not args.extensionkit_extensions_destination: + parser.error( + "Expected `--extensionkit-extensions-destination` argument to be specified when `--swift-stdlib-command` is present." + ) if not args.plugins_destination: parser.error( "Expected `--plugins-destination` argument to be specified when `--swift-stdlib-command` is present." @@ -495,6 +568,7 @@ def _swift_support_arguments( binary_destination=args.binary_destination, appclips_destination=args.appclips_destination, frameworks_destination=args.frameworks_destination, + extensionkit_extensions_destination=args.extensionkit_extensions_destination, plugins_destination=args.plugins_destination, platform=args.platform, sdk_root=args.sdk_root, @@ -508,15 +582,31 @@ def _write_incremental_state( codesigned: bool, codesign_configuration: CodesignConfiguration, selected_codesign_identity: Optional[str], + codesign_arguments: List[str], swift_stdlib_paths: List[Path], + versioned_if_macos: bool, + incremental_context: IncrementalContext, ) -> None: state = IncrementalState( items, codesigned=codesigned, codesign_configuration=codesign_configuration, - codesign_on_copy_paths=[Path(i.dst) for i in spec if i.codesign_on_copy], + codesigned_on_copy=[ + codesigned_on_copy_item( + path=Path(i.dst), + entitlements=( + Path(i.codesign_entitlements) if i.codesign_entitlements else None + ), + incremental_context=incremental_context, + codesign_flags_override=i.codesign_flags_override, + ) + for i in spec + if i.codesign_on_copy + ], codesign_identity=selected_codesign_identity, + codesign_arguments=codesign_arguments, swift_stdlib_paths=swift_stdlib_paths, + versioned_if_macos=versioned_if_macos, ) path.touch() try: diff --git a/prelude/apple/tools/bundling/swift_support.py b/prelude/apple/tools/bundling/swift_support.py index d1c53bf783b..58552408e07 100644 --- a/prelude/apple/tools/bundling/swift_support.py +++ b/prelude/apple/tools/bundling/swift_support.py @@ -26,6 +26,7 @@ class SwiftSupportArguments: binary_destination: Path appclips_destination: Path frameworks_destination: Path + extensionkit_extensions_destination: Path plugins_destination: Path platform: str sdk_root: Path @@ -73,6 +74,8 @@ def _execution_command( "--scan-folder", bundle_path / args.frameworks_destination, "--scan-folder", + bundle_path / args.extensionkit_extensions_destination, + "--scan-folder", bundle_path / 
args.plugins_destination, "--destination", tmp_dir, diff --git a/prelude/apple/tools/bundling/test_resources/newer_version_incremental_state.json b/prelude/apple/tools/bundling/test_resources/newer_version_incremental_state.json index 7e4b6d23661..79b549518e6 100644 --- a/prelude/apple/tools/bundling/test_resources/newer_version_incremental_state.json +++ b/prelude/apple/tools/bundling/test_resources/newer_version_incremental_state.json @@ -1,5 +1,5 @@ { - "version": 4, + "version": 7, "data": { "something": [] } diff --git a/prelude/apple/tools/bundling/test_resources/valid_incremental_state.json b/prelude/apple/tools/bundling/test_resources/valid_incremental_state.json index dd43ce55d7a..ff8bc2e49b0 100644 --- a/prelude/apple/tools/bundling/test_resources/valid_incremental_state.json +++ b/prelude/apple/tools/bundling/test_resources/valid_incremental_state.json @@ -18,12 +18,27 @@ ], "codesign_configuration": null, "codesigned": true, - "codesign_on_copy_paths": [ - "Resources/bar.txt" + "codesigned_on_copy": [ + { + "path": "Resources/bar.txt" + }, + { + "path": "Resources/baz.txt", + "entitlements_digest": "abc" + }, + { + "path": "Resources/qux.txt", + "codesign_flags_override": ["--deep", "--force"] + } ], "codesign_identity": "Johny Appleseed", + "codesign_arguments": [ + "--force", + "--deep" + ], + "versioned_if_macos": true, "swift_stdlib_paths": [ "Frameworks/libswiftCore.dylib" ], - "version": 3 + "version": 7 } diff --git a/prelude/apple/tools/code_signing/BUCK.v2 b/prelude/apple/tools/code_signing/BUCK.v2 index ecdcd43d9f6..85f36aba315 100644 --- a/prelude/apple/tools/code_signing/BUCK.v2 +++ b/prelude/apple/tools/code_signing/BUCK.v2 @@ -1,5 +1,22 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") load("@prelude//apple/tools/defs.bzl", "meta_python_test") +oncall("build_infra") + +source_listing() + +configured_alias( + name = "dummy_binary_for_signing_configured", + actual = ":dummy_binary_for_signing", + platform = "config//platform/macos:base", +) + +cxx_binary( + name = "dummy_binary_for_signing", + srcs = ["dummy_binary_for_signing.c"], + default_target_platform = "config//platform/macos:base", +) + python_library( name = "lib", srcs = glob( @@ -11,11 +28,12 @@ python_library( "main.py", ], ), + resources = [":dummy_binary_for_signing_configured"], + visibility = ["PUBLIC"], deps = [ "prelude//apple/tools:plistlib_utils", "prelude//apple/tools/info_plist_processor:process", ], - visibility = ["PUBLIC"], ) meta_python_test( @@ -35,6 +53,7 @@ python_binary( main = "main.py", visibility = ["PUBLIC"], deps = [ + "fbsource//third-party/pypi/typed-argument-parser:typed-argument-parser", ":lib", ], ) diff --git a/prelude/apple/tools/code_signing/app_id.py b/prelude/apple/tools/code_signing/app_id.py index deac46ebae3..d657a4da943 100644 --- a/prelude/apple/tools/code_signing/app_id.py +++ b/prelude/apple/tools/code_signing/app_id.py @@ -36,7 +36,12 @@ class _ReGroupName(str, Enum): def from_string(cls, string: str) -> AppId: match = re.match(cls._re_pattern, string) if not match: - raise RuntimeError("Malformed app ID string: {}".format(string)) + raise RuntimeError( + "Malformed app ID string: '{}'. 
" + "We expected a prefix of a ten-character alphanumeric sequence and a Bundle ID which may be a fully-qualified name or a wildcard ending in '*'.".format( + string + ) + ) return AppId( match.group(cls._ReGroupName.team_id), match.group(cls._ReGroupName.bundle_id), @@ -45,8 +50,13 @@ def from_string(cls, string: str) -> AppId: # Returns the App ID if it can be inferred from keys in the entitlement. Otherwise, it returns `None`. @staticmethod def infer_from_entitlements(entitlements: Dict[str, Any]) -> Optional[AppId]: - keychain_access_groups = entitlements.get("keychain-access-groups") - if not keychain_access_groups: - return None - app_id_string = keychain_access_groups[0] - return AppId.from_string(app_id_string) + try: + keychain_access_groups = entitlements.get("keychain-access-groups") + if not keychain_access_groups: + return None + app_id_string = keychain_access_groups[0] + return AppId.from_string(app_id_string) + except Exception as e: + raise RuntimeError( + "Error when parsing the entitlements for the app ID: {}".format(e) + ) diff --git a/prelude/apple/tools/code_signing/app_id_test.py b/prelude/apple/tools/code_signing/app_id_test.py index 2875a129a03..ab1c32bb83a 100644 --- a/prelude/apple/tools/code_signing/app_id_test.py +++ b/prelude/apple/tools/code_signing/app_id_test.py @@ -23,14 +23,36 @@ def test_string_parsing(self): expected = AppId("ABCDE12345", "*") self.assertEqual(expected, result) - with self.assertRaisesRegex(RuntimeError, "Malformed app ID string: invalid."): + with self.assertRaisesRegex( + RuntimeError, + "Malformed app ID string: 'invalid.'. We expected a prefix of a ten-character alphanumeric sequence and a Bundle ID which may be a fully-qualified name or a wildcard ending in '*'.", + ): _ = AppId.from_string("invalid.") def test_entitlements_parsing(self): - file = pkg_resources.resource_stream( + with pkg_resources.resource_stream( __name__, "test_resources/Entitlements.plist" - ) - entitlements = plistlib.load(file) - result = AppId.infer_from_entitlements(entitlements) - expected = AppId("ABCDE12345", "com.example.TestApp") - self.assertEqual(expected, result) + ) as file: + entitlements = plistlib.load(file) + result = AppId.infer_from_entitlements(entitlements) + expected = AppId("ABCDE12345", "com.example.TestApp") + self.assertEqual(expected, result) + + invalid_file = b""" + <?xml version="1.0" encoding="UTF-8"?> + <plist version="1.0"> + <dict> + <key>keychain-access-groups</key> + <array> + <string>com.facebook.CommonTestHost</string> + <string>p</string> + </array> + </dict> + </plist> + """ + + invalid_entitlement = plistlib.loads(invalid_file) + with self.assertRaisesRegex( + RuntimeError, + "Error when parsing the entitlements for the app ID: Malformed app ID string: 'com.facebook.CommonTestHost'. 
" + "We expected a prefix of a ten-character alphanumeric sequence and a Bundle ID which may be a fully-qualified name or a wildcard ending in '*'.", + ): + AppId.infer_from_entitlements(invalid_entitlement) diff --git a/prelude/apple/tools/code_signing/codesign_bundle.py b/prelude/apple/tools/code_signing/codesign_bundle.py index a4145761c4b..46307f9f570 100644 --- a/prelude/apple/tools/code_signing/codesign_bundle.py +++ b/prelude/apple/tools/code_signing/codesign_bundle.py @@ -8,6 +8,7 @@ # pyre-strict import asyncio +import importlib.resources import logging import os import shutil @@ -29,6 +30,7 @@ ICodesignCommandFactory, ) from .fast_adhoc import is_fast_adhoc_codesign_allowed, should_skip_adhoc_signing_path +from .identity import CodeSigningIdentity from .info_plist_metadata import InfoPlistMetadata from .list_codesign_identities import IListCodesignIdentities from .prepare_code_signing_entitlements import prepare_code_signing_entitlements @@ -36,6 +38,7 @@ from .provisioning_profile_diagnostics import ( interpret_provisioning_profile_diagnostics, META_IOS_BUILD_AND_RUN_ON_DEVICE_LINK, + META_IOS_PROVISIONING_PROFILES_COMMAND, META_IOS_PROVISIONING_PROFILES_LINK, ) from .provisioning_profile_metadata import ProvisioningProfileMetadata @@ -56,6 +59,33 @@ _LOGGER: logging.Logger = logging.getLogger(__name__) +@dataclass +class CodesignedPath: + path: Path + """ + Path relative to bundle root which needs to be codesigned + """ + entitlements: Optional[Path] + """ + Path to entitlements to be used when codesigning, relative to buck project + """ + flags: List[str] + """ + Flags to be passed to codesign command when codesigning this particular path + """ + + +def _log_codesign_identities(identities: List[CodeSigningIdentity]) -> None: + if len(identities) == 0: + _LOGGER.warning("ZERO codesign identities available") + else: + _LOGGER.info("Listing available codesign identities") + for identity in identities: + _LOGGER.info( + f" Subject Common Name: {identity.subject_common_name}, Fingerprint: {identity.fingerprint}" + ) + + def _select_provisioning_profile( info_plist_metadata: InfoPlistMetadata, provisioning_profiles_dir: Path, @@ -63,10 +93,15 @@ def _select_provisioning_profile( platform: ApplePlatform, list_codesign_identities: IListCodesignIdentities, should_use_fast_provisioning_profile_parsing: bool, - read_provisioning_profile_command_factory: IReadProvisioningProfileCommandFactory = _default_read_provisioning_profile_command_factory, + strict_provisioning_profile_search: bool, + provisioning_profile_filter: Optional[str], log_file_path: Optional[Path] = None, ) -> SelectedProvisioningProfileInfo: + read_provisioning_profile_command_factory = ( + _default_read_provisioning_profile_command_factory + ) identities = list_codesign_identities.list_codesign_identities() + _log_codesign_identities(identities) _LOGGER.info( f"Fast provisioning profile parsing enabled: {should_use_fast_provisioning_profile_parsing}" ) @@ -85,7 +120,12 @@ def _select_provisioning_profile( ) if not provisioning_profiles: raise CodeSignProvisioningError( - f"\n\nFailed to find any provisioning profiles. Please make sure to install required provisioning profiles and make sure they are located at '{provisioning_profiles_dir}'.\n\nPlease follow the wiki to build & run on device: {META_IOS_BUILD_AND_RUN_ON_DEVICE_LINK}.\nProvisioning profiles for your app can be downloaded from {META_IOS_PROVISIONING_PROFILES_LINK}.\n" + ( + f"\n\nFailed to find any provisioning profiles. 
Please make sure to install required provisioning profiles and make sure they are located at '{provisioning_profiles_dir}'.\n\n" + f"Execute `{META_IOS_PROVISIONING_PROFILES_COMMAND}` to download the profiles.\n" + f"Please follow the wiki to build & run on device: {META_IOS_BUILD_AND_RUN_ON_DEVICE_LINK}.\n" + f"Provisioning profiles for your app can also be downloaded from {META_IOS_PROVISIONING_PROFILES_LINK}.\n" + ) ) entitlements = _read_entitlements_file(entitlements_path) selected_profile_info, mismatches = select_best_provisioning_profile( @@ -94,6 +134,8 @@ def _select_provisioning_profile( provisioning_profiles, entitlements, platform, + strict_provisioning_profile_search, + provisioning_profile_filter, ) if selected_profile_info is None: if not mismatches: @@ -143,6 +185,8 @@ def signing_context_with_profile_selection( list_codesign_identities: IListCodesignIdentities, log_file_path: Optional[Path] = None, should_use_fast_provisioning_profile_parsing: bool = False, + strict_provisioning_profile_search: bool = False, + provisioning_profile_filter: Optional[str] = None, ) -> SigningContextWithProfileSelection: with open(info_plist_source, mode="rb") as info_plist_file: info_plist_metadata = InfoPlistMetadata.from_file(info_plist_file) @@ -154,6 +198,8 @@ def signing_context_with_profile_selection( list_codesign_identities=list_codesign_identities, log_file_path=log_file_path, should_use_fast_provisioning_profile_parsing=should_use_fast_provisioning_profile_parsing, + strict_provisioning_profile_search=strict_provisioning_profile_search, + provisioning_profile_filter=provisioning_profile_filter, ) return SigningContextWithProfileSelection( @@ -171,12 +217,10 @@ class CodesignConfiguration(str, Enum): def codesign_bundle( - bundle_path: Path, + bundle_path: CodesignedPath, signing_context: Union[AdhocSigningContext, SigningContextWithProfileSelection], - entitlements_path: Optional[Path], platform: ApplePlatform, - codesign_on_copy_paths: List[str], - codesign_args: List[str], + codesign_on_copy_paths: List[CodesignedPath], codesign_tool: Optional[Path] = None, codesign_configuration: Optional[CodesignConfiguration] = None, ) -> None: @@ -191,12 +235,13 @@ def codesign_bundle( ) if selection_profile_context: - prepared_entitlements_path = _prepare_entitlements_and_info_plist( - bundle_path=bundle_path, - entitlements_path=entitlements_path, - platform=platform, - signing_context=selection_profile_context, - tmp_dir=tmp_dir, + bundle_path_with_prepared_entitlements = ( + _prepare_entitlements_and_info_plist( + bundle_path=bundle_path, + platform=platform, + signing_context=selection_profile_context, + tmp_dir=tmp_dir, + ) ) selected_identity_fingerprint = ( selection_profile_context.selected_profile_info.identity.fingerprint @@ -210,7 +255,7 @@ def codesign_bundle( raise AssertionError( "Expected no profile selection context in `AdhocSigningContext` when `selection_profile_context` is `None`." ) - prepared_entitlements_path = entitlements_path + bundle_path_with_prepared_entitlements = bundle_path selected_identity_fingerprint = signing_context.codesign_identity if codesign_configuration is CodesignConfiguration.dryRun: @@ -219,14 +264,12 @@ def codesign_bundle( "Expected codesign tool not to be the default one when dry run codesigning is requested." 
) _dry_codesign_everything( - bundle_path=bundle_path, + root=bundle_path_with_prepared_entitlements, codesign_on_copy_paths=codesign_on_copy_paths, identity_fingerprint=selected_identity_fingerprint, tmp_dir=tmp_dir, codesign_tool=codesign_tool, - entitlements=prepared_entitlements_path, platform=platform, - codesign_args=codesign_args, ) else: fast_adhoc_signing_enabled = ( @@ -235,29 +278,26 @@ def codesign_bundle( ) _LOGGER.info(f"Fast adhoc signing enabled: {fast_adhoc_signing_enabled}") _codesign_everything( - bundle_path=bundle_path, + root=bundle_path_with_prepared_entitlements, codesign_on_copy_paths=codesign_on_copy_paths, identity_fingerprint=selected_identity_fingerprint, tmp_dir=tmp_dir, codesign_command_factory=DefaultCodesignCommandFactory(codesign_tool), - entitlements=prepared_entitlements_path, platform=platform, fast_adhoc_signing=fast_adhoc_signing_enabled, - codesign_args=codesign_args, ) def _prepare_entitlements_and_info_plist( - bundle_path: Path, - entitlements_path: Optional[Path], + bundle_path: CodesignedPath, platform: ApplePlatform, signing_context: SigningContextWithProfileSelection, tmp_dir: str, -) -> Path: +) -> CodesignedPath: info_plist_metadata = signing_context.info_plist_metadata selected_profile = signing_context.selected_profile_info.profile prepared_entitlements_path = prepare_code_signing_entitlements( - entitlements_path, + bundle_path.entitlements, info_plist_metadata.bundle_id, selected_profile, tmp_dir, @@ -270,13 +310,17 @@ def _prepare_entitlements_and_info_plist( ) os.replace( prepared_info_plist_path, - bundle_path / signing_context.info_plist_destination, + bundle_path.path / signing_context.info_plist_destination, ) shutil.copy2( selected_profile.file_path, - bundle_path / platform.embedded_provisioning_profile_path(), + bundle_path.path / platform.embedded_provisioning_profile_path(), + ) + return CodesignedPath( + path=bundle_path.path, + entitlements=prepared_entitlements_path, + flags=bundle_path.flags, ) - return prepared_entitlements_path async def _fast_read_provisioning_profiles_async( @@ -380,20 +424,17 @@ def _read_entitlements_file(path: Optional[Path]) -> Optional[Dict[str, Any]]: def _dry_codesign_everything( - bundle_path: Path, - codesign_on_copy_paths: List[str], + root: CodesignedPath, + codesign_on_copy_paths: List[CodesignedPath], identity_fingerprint: str, tmp_dir: str, codesign_tool: Path, - entitlements: Optional[Path], platform: ApplePlatform, - codesign_args: List[str], ) -> None: codesign_command_factory = DryRunCodesignCommandFactory(codesign_tool) - codesign_on_copy_abs_paths = [bundle_path / path for path in codesign_on_copy_paths] codesign_on_copy_directory_paths = [ - p for p in codesign_on_copy_abs_paths if p.is_dir() + p for p in codesign_on_copy_paths if p.path.is_dir() ] # First sign codesign-on-copy directory paths @@ -402,15 +443,15 @@ def _dry_codesign_everything( identity_fingerprint=identity_fingerprint, tmp_dir=tmp_dir, codesign_command_factory=codesign_command_factory, - entitlements=None, platform=platform, - codesign_args=codesign_args, ) # Dry codesigning creates a .plist inside every directory it signs. # That approach doesn't work for files so those files are written into .plist for root bundle. 
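# For example (paths illustrative; Resources/bar.txt is borrowed from the test
# fixtures earlier in this patch): a codesign-on-copy file such as
# Resources/bar.txt is recorded in the root bundle's dry-run .plist, while a
# codesign-on-copy directory is given its own .plist written inside it.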
codesign_on_copy_file_paths = [ - p.relative_to(bundle_path) for p in codesign_on_copy_abs_paths if p.is_file() + p.path.relative_to(root.path) + for p in codesign_on_copy_paths + if p.path.is_file() ] codesign_command_factory.set_codesign_on_copy_file_paths( codesign_on_copy_file_paths @@ -418,60 +459,56 @@ def _dry_codesign_everything( # Lastly sign whole bundle _codesign_paths( - paths=[bundle_path], + paths=[root], identity_fingerprint=identity_fingerprint, tmp_dir=tmp_dir, codesign_command_factory=codesign_command_factory, - entitlements=entitlements, platform=platform, - codesign_args=codesign_args, ) def _codesign_everything( - bundle_path: Path, - codesign_on_copy_paths: List[str], + root: CodesignedPath, + codesign_on_copy_paths: List[CodesignedPath], identity_fingerprint: str, tmp_dir: str, codesign_command_factory: ICodesignCommandFactory, - entitlements: Optional[Path], platform: ApplePlatform, fast_adhoc_signing: bool, - codesign_args: List[str], ) -> None: # First sign codesign-on-copy paths codesign_on_copy_filtered_paths = _filter_out_fast_adhoc_paths( - paths=[bundle_path / path for path in codesign_on_copy_paths], + paths=codesign_on_copy_paths, identity_fingerprint=identity_fingerprint, - entitlements=entitlements, platform=platform, fast_adhoc_signing=fast_adhoc_signing, ) + # If we have > 1 paths to sign (including root bundle), access keychain first to avoid user playing whack-a-mole + # with permission grant dialog windows. + if codesign_on_copy_filtered_paths: + obtain_keychain_permissions( + identity_fingerprint, tmp_dir, codesign_command_factory + ) _codesign_paths( codesign_on_copy_filtered_paths, identity_fingerprint, tmp_dir, codesign_command_factory, - None, platform, - codesign_args, ) # Lastly sign whole bundle - root_bundle_paths = _filter_out_fast_adhoc_paths( - paths=[bundle_path], + root_filtered_paths = _filter_out_fast_adhoc_paths( + paths=[root], identity_fingerprint=identity_fingerprint, - entitlements=entitlements, platform=platform, fast_adhoc_signing=fast_adhoc_signing, ) _codesign_paths( - root_bundle_paths, + root_filtered_paths, identity_fingerprint, tmp_dir, codesign_command_factory, - entitlements, platform, - codesign_args, ) @@ -520,28 +557,24 @@ def _spawn_process( def _spawn_codesign_process( - path: Path, + path: CodesignedPath, identity_fingerprint: str, tmp_dir: str, codesign_command_factory: ICodesignCommandFactory, - entitlements: Optional[Path], stack: ExitStack, - codesign_args: List[str], ) -> ParallelProcess: command = codesign_command_factory.codesign_command( - path, identity_fingerprint, entitlements, codesign_args + path.path, identity_fingerprint, path.entitlements, path.flags ) return _spawn_process(command=command, tmp_dir=tmp_dir, stack=stack) def _codesign_paths( - paths: List[Path], + paths: List[CodesignedPath], identity_fingerprint: str, tmp_dir: str, codesign_command_factory: ICodesignCommandFactory, - entitlements: Optional[Path], platform: ApplePlatform, - codesign_args: List[str], ) -> None: """Codesigns several paths in parallel.""" processes: List[ParallelProcess] = [] @@ -552,9 +585,7 @@ def _codesign_paths( identity_fingerprint=identity_fingerprint, tmp_dir=tmp_dir, codesign_command_factory=codesign_command_factory, - entitlements=entitlements, stack=stack, - codesign_args=codesign_args, ) processes.append(process) for p in processes: @@ -564,12 +595,11 @@ def _codesign_paths( def _filter_out_fast_adhoc_paths( - paths: List[Path], + paths: List[CodesignedPath], identity_fingerprint: str, - entitlements: 
Optional[Path], platform: ApplePlatform, fast_adhoc_signing: bool, -) -> List[Path]: +) -> List[CodesignedPath]: if not fast_adhoc_signing: return paths # TODO(T149863217): Make skip checks run in parallel, they're usually fast (~15ms) @@ -578,6 +608,30 @@ def _filter_out_fast_adhoc_paths( p for p in paths if not should_skip_adhoc_signing_path( - p, identity_fingerprint, entitlements, platform + p.path, identity_fingerprint, p.entitlements, platform ) ] + + +def obtain_keychain_permissions( + identity_fingerprint: str, + tmp_dir: str, + codesign_command_factory: ICodesignCommandFactory, +) -> None: + with ExitStack() as stack, importlib.resources.path( + __package__, "dummy_binary_for_signing" + ) as dummy_binary_path: + # Copy the binary to avoid races vs other bundling actions + dummy_binary_copied = os.path.join(tmp_dir, "dummy_binary_for_signing") + shutil.copyfile(dummy_binary_path, dummy_binary_copied, follow_symlinks=True) + p = _spawn_codesign_process( + path=CodesignedPath( + path=Path(dummy_binary_copied), entitlements=None, flags=[] + ), + identity_fingerprint=identity_fingerprint, + tmp_dir=tmp_dir, + codesign_command_factory=codesign_command_factory, + stack=stack, + ) + p.process.wait() + p.check_result() diff --git a/prelude/apple/tools/code_signing/dummy_binary_for_signing.c b/prelude/apple/tools/code_signing/dummy_binary_for_signing.c new file mode 100644 index 00000000000..22b15a7fa58 --- /dev/null +++ b/prelude/apple/tools/code_signing/dummy_binary_for_signing.c @@ -0,0 +1,12 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under both the MIT license found in the + * LICENSE-MIT file in the root directory of this source tree and the Apache + * License, Version 2.0 found in the LICENSE-APACHE file in the root directory + * of this source tree. + */ + +int main() { + return 0; +} diff --git a/prelude/apple/tools/code_signing/fast_adhoc.py b/prelude/apple/tools/code_signing/fast_adhoc.py index 9d79c57e8d2..10bb78934be 100644 --- a/prelude/apple/tools/code_signing/fast_adhoc.py +++ b/prelude/apple/tools/code_signing/fast_adhoc.py @@ -88,6 +88,12 @@ def should_skip_adhoc_signing_path( _LOGGER.info(" Requested non-adhoc signing, not adhoc skipping signing") return False + if "libclang_rt" in str(path): + # Sanitizer runtime dylibs require re-signing, even though they're already pre-signed. + # Otherwise, `codesign` fails to sign the top-level bundle (as the adhoc pre-signed + # sanitizer dylibs have been signed within a different context). 
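# For example (dylib name illustrative, following the Xcode toolchain naming
# scheme): Frameworks/libclang_rt.asan_iossim_dynamic.dylib matches this
# substring check, so the fast ad-hoc path never skips it and the dylib is
# always re-signed in the current signing context.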
+ return False + codesign_args = ["/usr/bin/codesign", "-d", "-v", path] codesign_result = _logged_subprocess_run( "codesign", "check pre-existing signature", codesign_args diff --git a/prelude/apple/tools/code_signing/info_plist_metadata.py b/prelude/apple/tools/code_signing/info_plist_metadata.py index 7778c7defc4..beb99b5ead9 100644 --- a/prelude/apple/tools/code_signing/info_plist_metadata.py +++ b/prelude/apple/tools/code_signing/info_plist_metadata.py @@ -27,5 +27,5 @@ def from_file(info_plist_file: IO[bytes]) -> InfoPlistMetadata: return InfoPlistMetadata( root["CFBundleIdentifier"], root.get("CFBundlePackageType"), - root.get("WKWatchKitApp", False), + root.get("WKApplication", False), ) diff --git a/prelude/apple/tools/code_signing/info_plist_metadata_test.py b/prelude/apple/tools/code_signing/info_plist_metadata_test.py index 9b0c91b060e..98ab53adac3 100644 --- a/prelude/apple/tools/code_signing/info_plist_metadata_test.py +++ b/prelude/apple/tools/code_signing/info_plist_metadata_test.py @@ -22,7 +22,7 @@ def test_canary(self): com.company.application CFBundlePackageType APPL - WKWatchKitApp + WKApplication diff --git a/prelude/apple/tools/code_signing/main.py b/prelude/apple/tools/code_signing/main.py index eba6fbf2a92..dd6ed288db6 100644 --- a/prelude/apple/tools/code_signing/main.py +++ b/prelude/apple/tools/code_signing/main.py @@ -7,86 +7,122 @@ # pyre-strict -import argparse import pathlib import sys +from typing import List, Optional + +from tap import Tap from .apple_platform import ApplePlatform from .codesign_bundle import ( AdhocSigningContext, codesign_bundle, + CodesignedPath, signing_context_with_profile_selection, ) from .list_codesign_identities import ListCodesignIdentities from .provisioning_profile_selection import CodeSignProvisioningError -def _args_parser() -> argparse.ArgumentParser: - parser = argparse.ArgumentParser( - description="Tool which code signs the Apple bundle. `Info.plist` file is amended as a part of it." - ) - parser.add_argument( - "--bundle-path", - metavar="", - type=pathlib.Path, - required=True, - help="Absolute path to Apple bundle result.", - ) - parser.add_argument( - "--info-plist", - metavar="", - type=pathlib.Path, - required=True, - help="Bundle relative destination path to Info.plist file if it is present in bundle.", - ) - parser.add_argument( - "--entitlements", - metavar="", - type=pathlib.Path, - required=False, - help="Path to file with entitlements to be used during code signing. If it's not provided the minimal entitlements are going to be generated.", - ) - parser.add_argument( - "--profiles-dir", - metavar="", - type=pathlib.Path, - required=False, - help="Path to directory with provisioning profile files. 
Required if code signing is not ad-hoc.", - ) - parser.add_argument( - "--ad-hoc", - action="store_true", - help="Perform ad-hoc signing if set.", - ) - parser.add_argument( - "--ad-hoc-codesign-identity", - metavar="", - type=str, - required=False, - help="Codesign identity to use when ad-hoc signing is performed.", - ) - parser.add_argument( - "--platform", - metavar="", - type=ApplePlatform, - required=True, - help="Apple platform for which the bundle was built.", - ) - parser.add_argument( - "--codesign-on-copy", - metavar="", - type=pathlib.Path, - action="append", - required=False, - help="Bundle relative path that should be codesigned prior to result bundle.", - ) - parser.add_argument( - "--fast-provisioning-profile-parsing", - action="store_true", - help="Uses experimental faster provisioning profile parsing.", - ) - - return parser +class Arguments(Tap): + """ + Tool which code signs the Apple bundle. `Info.plist` file is amended as a part of it. + """ + + # pyre-fixme[13]: Attribute `bundle_path` is never initialized. + bundle_path: pathlib.Path + # pyre-fixme[13]: Attribute `info_plist` is never initialized. + info_plist: pathlib.Path + entitlements: Optional[pathlib.Path] = None + profiles_dir: Optional[pathlib.Path] = None + ad_hoc: bool = False + ad_hoc_codesign_identity: Optional[str] = None + # pyre-fixme[13]: Attribute `platform` is never initialized. + platform: ApplePlatform + codesign_on_copy: Optional[List[pathlib.Path]] = None + fast_provisioning_profile_parsing: bool = False + strict_provisioning_profile_search: bool = False + provisioning_profile_filter: Optional[str] = None + + def configure(self) -> None: + """ + Configure the arguments. + """ + self.add_argument( + "--bundle-path", + metavar="", + type=pathlib.Path, + required=True, + help="Absolute path to Apple bundle result.", + ) + self.add_argument( + "--info-plist", + metavar="", + type=pathlib.Path, + required=True, + help="Bundle relative destination path to Info.plist file if it is present in bundle.", + ) + self.add_argument( + "--entitlements", + metavar="", + type=pathlib.Path, + required=False, + help="Path to file with entitlements to be used during code signing. If it's not provided the minimal entitlements are going to be generated.", + ) + self.add_argument( + "--profiles-dir", + metavar="", + type=pathlib.Path, + required=False, + help="Path to directory with provisioning profile files. 
Required if code signing is not ad-hoc.", + ) + self.add_argument( + "--ad-hoc", + action="store_true", + required=False, + help="Perform ad-hoc signing if set.", + ) + self.add_argument( + "--ad-hoc-codesign-identity", + metavar="", + type=str, + required=False, + help="Codesign identity to use when ad-hoc signing is performed.", + ) + self.add_argument( + "--platform", + metavar="", + type=ApplePlatform, + required=True, + help="Apple platform for which the bundle was built.", + ) + self.add_argument( + "--codesign-on-copy", + metavar="", + type=pathlib.Path, + action="append", + required=False, + help="Bundle relative path that should be codesigned prior to result bundle.", + ) + self.add_argument( + "--fast-provisioning-profile-parsing", + action="store_true", + required=False, + help="Uses experimental faster provisioning profile parsing.", + ) + self.add_argument( + "--strict-provisioning-profile-search", + action="store_true", + required=False, + help="Fail code signing if more than one matching profile found.", + ) + self.add_argument( + "--provisioning-profile-filter", + metavar="", + type=str, + required=False, + help="Regex to disambiguate multiple matching profiles, evaluated against provisioning profile filename.", + ) # Add emoji to beginning of actionable error message so it stands out more. @@ -95,7 +131,7 @@ def decorate_error_message(message: str) -> str: def _main() -> None: - args = _args_parser().parse_args() + args = Arguments().parse_args() try: if args.ad_hoc: signing_context = AdhocSigningContext( @@ -105,22 +141,38 @@ def _main() -> None: assert ( args.profiles_dir ), "Path to directory with provisioning profile files should be set when signing is not ad-hoc." + non_optional_profiles_dir = args.profiles_dir signing_context = signing_context_with_profile_selection( info_plist_source=args.bundle_path / args.info_plist, info_plist_destination=args.info_plist, - provisioning_profiles_dir=args.profiles_dir, + provisioning_profiles_dir=non_optional_profiles_dir, entitlements_path=args.entitlements, list_codesign_identities=ListCodesignIdentities.default(), platform=args.platform, should_use_fast_provisioning_profile_parsing=args.fast_provisioning_profile_parsing, + strict_provisioning_profile_search=args.strict_provisioning_profile_search, + provisioning_profile_filter=args.provisioning_profile_filter, ) + + bundle_path = CodesignedPath( + path=args.bundle_path, entitlements=args.entitlements, flags=[] + ) + codesign_on_copy_paths = ( + [ + CodesignedPath( + path=bundle_path.path / path, entitlements=None, flags=[] + ) + for path in args.codesign_on_copy + ] + if args.codesign_on_copy + else [] + ) + codesign_bundle( - bundle_path=args.bundle_path, + bundle_path=bundle_path, signing_context=signing_context, - entitlements_path=args.entitlements, platform=args.platform, - codesign_on_copy_paths=args.codesign_on_copy or [], - codesign_args=[], + codesign_on_copy_paths=codesign_on_copy_paths, ) except CodeSignProvisioningError as e: print(decorate_error_message(str(e)), file=sys.stderr) diff --git a/prelude/apple/tools/code_signing/prepare_info_plist_test.py b/prelude/apple/tools/code_signing/prepare_info_plist_test.py index dadaca18f0e..bf4d0061d41 100644 --- a/prelude/apple/tools/code_signing/prepare_info_plist_test.py +++ b/prelude/apple/tools/code_signing/prepare_info_plist_test.py @@ -53,7 +53,7 @@ def test_app_id_set_for_non_watchos_apps(self): info_plist = { "CFBundleIdentifier": "com.facebook.test", "CFBundlePackageType": "APPL", - "WKWatchKitApp": True, + 
"WKApplication": True, } info_plist_path, info_plist_metadata = _write_info_plist( info_plist, tmp_dir, "Info.plist" diff --git a/prelude/apple/tools/code_signing/provisioning_profile_diagnostics.py b/prelude/apple/tools/code_signing/provisioning_profile_diagnostics.py index 1187cab2c25..92f9a99f21d 100644 --- a/prelude/apple/tools/code_signing/provisioning_profile_diagnostics.py +++ b/prelude/apple/tools/code_signing/provisioning_profile_diagnostics.py @@ -23,6 +23,11 @@ META_IOS_PROVISIONING_PROFILES_LINK: str = ( "https://www.internalfb.com/intern/apple/download-provisioning-profile/" ) +META_IOS_PROVISIONING_PROFILES_COMMAND: str = ( + "arc ios-certs --download-provisioning-profiles" +) +META_IOS_CERTS_ALL_COMMAND: str = "arc ios-certs --all" +# TODO(T197258387): Remove references to `arc download-provisioning-profile` in this wiki page. META_IOS_BUILD_AND_RUN_ON_DEVICE_LINK: str = ( "https://www.internalfb.com/intern/wiki/Ios-first-steps/running-on-device/#2-register-your-device-i" ) @@ -196,12 +201,12 @@ def find_mismatch(class_type: Type[_T]) -> Optional[_T]: if len(identities) == 0 else f"List of signing identities: `{identities}`." ) - return "".join( + return "\n".join( [ header, f"The provisioning profile `{mismatch.profile.file_path.name}` satisfies all constraints, but no matching certificates were found in your keychain. ", identities_description, - f"Please download and install the latest certificate from {META_IOS_DEVELOPER_CERTIFICATE_LINK}.", + f"Execute `{META_IOS_CERTS_ALL_COMMAND}` or download and install the latest certificate from {META_IOS_DEVELOPER_CERTIFICATE_LINK}.", footer, ] ) @@ -212,7 +217,8 @@ def find_mismatch(class_type: Type[_T]) -> Optional[_T]: header, f"The provisioning profile `{mismatch.profile.file_path.name}` is the best match, but it doesn't contain all the needed entitlements. ", f"Expected entitlement item with key `{mismatch.mismatched_key}` and value `{mismatch.mismatched_value}` is missing. ", - f"Usually that means the application entitlements were changed recently, provisioning profile was updated and you need to download & install the latest version of provisioning profile for Bundle ID `{bundle_id}` from {META_IOS_PROVISIONING_PROFILES_LINK}", + f"Usually that means the application entitlements were changed recently, provisioning profile was updated and you need to download & install the latest version of provisioning profile for Bundle ID `{bundle_id}`.", + f"Execute `{META_IOS_PROVISIONING_PROFILES_COMMAND}` or download from from {META_IOS_PROVISIONING_PROFILES_LINK}", footer, ] ) @@ -233,7 +239,8 @@ def find_mismatch(class_type: Type[_T]) -> Optional[_T]: [ header, f"The provisioning profile `{mismatch.profile.file_path.name}` is the the best match; however, it has expired", - f"Please download and install a valid profile from {META_IOS_PROVISIONING_PROFILES_LINK}", + f"Execute `{META_IOS_PROVISIONING_PROFILES_COMMAND}` to get the latest provisioning profiles.", + f"Alternatively, please download and install a valid profile from {META_IOS_PROVISIONING_PROFILES_LINK}", footer, ] ) @@ -242,7 +249,8 @@ def find_mismatch(class_type: Type[_T]) -> Optional[_T]: [ header, f"No provisioning profile matching the Bundle ID `{bundle_id}` was found. 
", - f"Please download and install the appropriate profile from {META_IOS_PROVISIONING_PROFILES_LINK}", + f"Execute `{META_IOS_PROVISIONING_PROFILES_COMMAND}` to get the latest provisioning profiles.", + f"Alternatively, please download and install the appropriate profile from {META_IOS_PROVISIONING_PROFILES_LINK}", footer, ] ) diff --git a/prelude/apple/tools/code_signing/provisioning_profile_selection.py b/prelude/apple/tools/code_signing/provisioning_profile_selection.py index c927b1d6fa9..5e3c4a9b59b 100644 --- a/prelude/apple/tools/code_signing/provisioning_profile_selection.py +++ b/prelude/apple/tools/code_signing/provisioning_profile_selection.py @@ -9,6 +9,8 @@ import datetime import logging +import re +from collections import defaultdict from dataclasses import dataclass from typing import Any, cast, Dict, List, Optional, Tuple @@ -135,12 +137,54 @@ def _check_developer_certificates_match( ) +def _make_multiple_matching_profiles_message( + profiles: list[ProvisioningProfileMetadata], + strict_search: bool, +) -> str: + messages = [f"Found MULTIPLE matching profiles: {len(profiles)}"] + messages += [ + f" Matching Profile = UUID:{profile.uuid}, file path: {profile.file_path}" + for profile in profiles + ] + + if strict_search: + messages += [ + "Strict provisioning profile search is ENABLED, build will FAIL due to ambiguous provisioning profile search results.", + "To resolve the problem, ensure only a single profile matches.", + "To unblock, you have two options:", + "Option 1: Disable strict provisioning profile search for the targets failing to build.", + " If the target failing to build is an `apple_bundle()`, set the `strict_provisioning_profile_search` attribute to `False`.", + " If the target failing to build is produced by `ios_binary()`, set the `bundle_strict_provisioning_profile_search` attribute to `False`.", + " You can commit such a change, so that the issue can be investigated without blocking other developers.", + " NB: This is a TEMPORARY WORKAROUND, as it only disables the strict checking, it does not resolve the ambiguity.", + "Option 2: Pass `--config apple.strict_provisioning_profile_search=false` as part of your build command.", + " DO NOT COMMIT such a change by adding this to any CI configs.", + ] + + return "\n".join(messages) + + @dataclass class SelectedProvisioningProfileInfo: profile: ProvisioningProfileMetadata identity: CodeSigningIdentity +def _filter_matching_selected_provisioning_profile_infos( + selected_profile_infos: list[SelectedProvisioningProfileInfo], + provisioning_profile_filter: Optional[str], +) -> list[SelectedProvisioningProfileInfo]: + if len(selected_profile_infos) <= 1 or (not provisioning_profile_filter): + return selected_profile_infos + + preference_regex = re.compile(provisioning_profile_filter) + return [ + matching_profile_info + for matching_profile_info in selected_profile_infos + if preference_regex.search(matching_profile_info.profile.file_path.name) + ] + + # See `ProvisioningProfileStore::getBestProvisioningProfile` in `ProvisioningProfileStore.java` for Buck v1 equivalent def select_best_provisioning_profile( info_plist_metadata: InfoPlistMetadata, @@ -148,6 +192,8 @@ def select_best_provisioning_profile( provisioning_profiles: List[ProvisioningProfileMetadata], entitlements: Optional[Dict[str, Any]], platform: ApplePlatform, + strict_search: bool, + provisioning_profile_filter: Optional[str], ) -> Tuple[ Optional[SelectedProvisioningProfileInfo], List[IProvisioningProfileDiagnostics] ]: @@ -169,7 +215,6 @@ def 
select_best_provisioning_profile( maybe_team_id_constraint = _parse_team_id_from_entitlements(entitlements) best_match_length = -1 - result = None # Used for error messages diagnostics: List[IProvisioningProfileDiagnostics] = [] @@ -180,6 +225,8 @@ def log_mismatched_profile(mismatch: IProvisioningProfileDiagnostics) -> None: f"Skipping provisioning profile `{mismatch.profile.file_path.name}`: {mismatch.log_message()}" ) + selected_profile_infos_for_match_length = defaultdict(list) + for profile in provisioning_profiles: app_id = profile.get_app_id() if maybe_team_id_constraint and maybe_team_id_constraint != app_id.team_id: @@ -250,8 +297,58 @@ def log_mismatched_profile(mismatch: IProvisioningProfileDiagnostics) -> None: _LOGGER.info( f"Matching provisioning profile `{profile.file_path.name}` with score {current_match_length}" ) + + selected_profile_info = SelectedProvisioningProfileInfo(profile, certificate) + selected_profile_infos_for_match_length[current_match_length] += [ + selected_profile_info + ] + if current_match_length > best_match_length: best_match_length = current_match_length - result = SelectedProvisioningProfileInfo(profile, certificate) + + all_matching_selected_profile_infos = selected_profile_infos_for_match_length.get( + best_match_length, [] + ) + + all_matching_selected_profile_infos = ( + _filter_matching_selected_provisioning_profile_infos( + all_matching_selected_profile_infos, provisioning_profile_filter + ) + ) + + if len(all_matching_selected_profile_infos) > 1: + all_matching_profiles = [ + selected_profile_info.profile + for selected_profile_info in all_matching_selected_profile_infos + ] + multiple_profiles_message = _make_multiple_matching_profiles_message( + all_matching_profiles, + strict_search, + ) + _LOGGER.info(multiple_profiles_message) + if strict_search: + raise CodeSignProvisioningError(multiple_profiles_message) + + result = ( + # By definition, we always pick the _last_ matching prov profile + all_matching_selected_profile_infos[-1] + if all_matching_selected_profile_infos + else None + ) + + if result: + _LOGGER.info( + ( + f"Found matching provisioning profile and identity\n" + f" Selected Identity: {result.identity}\n" + f" Provisioning Profile: `{result.profile.file_path.name}`\n" + f" UUID: {result.profile.uuid}\n" + f" File: {result.profile.file_path}\n" + f" Expiration: {result.profile.expiration_date}\n" + f" Platforms: {result.profile.platforms}\n" + f" Fingerprints: {result.profile.developer_certificate_fingerprints}\n" + f" Entitlements: {result.profile.entitlements}" + ) + ) return result, diagnostics diff --git a/prelude/apple/tools/code_signing/provisioning_profile_selection_test.py b/prelude/apple/tools/code_signing/provisioning_profile_selection_test.py index 5a06e61065d..8d7f213330d 100644 --- a/prelude/apple/tools/code_signing/provisioning_profile_selection_test.py +++ b/prelude/apple/tools/code_signing/provisioning_profile_selection_test.py @@ -55,6 +55,8 @@ def test_expired_profiles_are_ignored(self): [expired_provisioning_profile], {}, ApplePlatform.ios_device, + False, + None, ) self.assertIsNone(selected) self.verify_diagnostic_info_candidate_profile( @@ -70,9 +72,119 @@ def test_expired_profiles_are_ignored(self): [fresh_provisioning_profiles], {}, ApplePlatform.ios_device, + False, + None, ) self.assertIsNotNone(selected) + def test_multiple_matching_profiles_strict_mode(self): + info_plist = InfoPlistMetadata("com.company.application", None, False) + identity = CodeSigningIdentity( + "fingerprint", + "name", + ) + 
first = ProvisioningProfileMetadata( + Path("/foo.first"), + "00000000-0000-0000-0000-000000000000", + datetime.max, + {"iOS"}, + {identity.fingerprint}, + {"application-identifier": "AAAAAAAAAA.*"}, + ) + second = ProvisioningProfileMetadata( + Path("/foo.second"), + "00000000-0000-0000-0000-000000000000", + datetime.max, + {"iOS"}, + {identity.fingerprint}, + {"application-identifier": "AAAAAAAAAA.*"}, + ) + profiles = [ + first, + second, + ] + + selection_failed = False + try: + _, _ = select_best_provisioning_profile( + info_plist, + [identity], + profiles, + {"keychain-access-groups": ["AAAAAAAAAA.*"]}, + ApplePlatform.ios_device, + True, + None, + ) + except Exception: + selection_failed = True + + self.assertTrue(selection_failed) + + def test_multiple_matching_profiles_with_preference(self): + info_plist = InfoPlistMetadata("com.company.application", None, False) + identity = CodeSigningIdentity( + "fingerprint", + "name", + ) + first = ProvisioningProfileMetadata( + Path("/foo.first"), + "00000000-0000-0000-0000-000000000000", + datetime.max, + {"iOS"}, + {identity.fingerprint}, + {"application-identifier": "AAAAAAAAAA.*"}, + ) + second = ProvisioningProfileMetadata( + Path("/foo.second"), + "00000000-0000-0000-0000-000000000000", + datetime.max, + {"iOS"}, + {identity.fingerprint}, + {"application-identifier": "AAAAAAAAAA.*"}, + ) + third = ProvisioningProfileMetadata( + Path("/foo.third"), + "00000000-0000-0000-0000-000000000000", + datetime.max, + {"iOS"}, + {identity.fingerprint}, + {"application-identifier": "AAAAAAAAAA.*"}, + ) + profiles = [ + first, + second, + third, + ] + + selection_failed = False + try: + _, _ = select_best_provisioning_profile( + info_plist, + [identity], + profiles, + {"keychain-access-groups": ["AAAAAAAAAA.*"]}, + ApplePlatform.ios_device, + True, + None, + ) + except Exception: + selection_failed = True + + # Check selection fails without preference + self.assertTrue(selection_failed) + + selected_profile_info, _ = select_best_provisioning_profile( + info_plist, + [identity], + profiles, + {"keychain-access-groups": ["AAAAAAAAAA.*"]}, + ApplePlatform.ios_device, + True, + ".+second", + ) + # Check the middle profile got chosen (i.e., not first or last) + self.assertEqual(selected_profile_info.profile, second) + def test_prefix_override(self): info_plist = InfoPlistMetadata("com.company.application", None, False) identity = CodeSigningIdentity( @@ -104,6 +216,8 @@ def test_prefix_override(self): profiles, {"keychain-access-groups": ["AAAAAAAAAA.*"]}, ApplePlatform.ios_device, + False, + None, ) self.assertEqual(selected, SelectedProvisioningProfileInfo(expected, identity)) @@ -153,6 +267,8 @@ def test_entitlement_keys_are_matched(self): "com.apple.security.application-groups": ["foo", "bar"], }, ApplePlatform.ios_device, + False, + None, ) self.assertEqual(selected, SelectedProvisioningProfileInfo(expected, identity)) @@ -165,6 +281,8 @@ def test_entitlement_keys_are_matched(self): "com.apple.security.application-groups": ["foo", "bar"], }, ApplePlatform.ios_device, + False, + None, ) self.assertEqual(selected, SelectedProvisioningProfileInfo(expected, identity)) @@ -177,6 +295,8 @@ def test_entitlement_keys_are_matched(self): "com.apple.security.application-groups": ["foo", "xxx"], }, ApplePlatform.ios_device, + False, + None, ) self.assertIsNone(selected) self.verify_diagnostic_info_candidate_profile( @@ -222,6 +342,8 @@ def test_only_profiles_containing_valid_fingerprints_are_matched(self): profiles, {}, ApplePlatform.ios_device, + False, + 
None, ) self.assertEqual( selected, SelectedProvisioningProfileInfo(expected, valid_identity) @@ -232,6 +354,8 @@ def test_only_profiles_containing_valid_fingerprints_are_matched(self): [unexpected], {}, ApplePlatform.ios_device, + False, + None, ) self.assertIsNone(selected) self.verify_diagnostic_info_candidate_profile( @@ -274,6 +398,8 @@ def test_matches_specific_app(self): profiles, {}, ApplePlatform.ios_device, + False, + None, ) self.assertEqual(selected, SelectedProvisioningProfileInfo(expected, identity)) @@ -283,6 +409,8 @@ def test_matches_specific_app(self): reversed(profiles), {}, ApplePlatform.ios_device, + False, + None, ) self.assertEqual(selected, SelectedProvisioningProfileInfo(expected, identity)) @@ -308,6 +436,8 @@ def test_matches_wildcard(self): [expected], None, ApplePlatform.ios_device, + False, + None, ) self.assertEqual(selected, SelectedProvisioningProfileInfo(expected, identity)) @@ -340,6 +470,8 @@ def test_force_included_app_entitlements(self): "aps-environment": "production", }, ApplePlatform.ios_device, + False, + None, ) self.assertIsNotNone(selected) @@ -371,6 +503,8 @@ def test_unmatched_app_entitlement(self): "com.made.up.entitlement": "buck", }, ApplePlatform.ios_device, + False, + None, ) self.assertIsNone(selected) self.verify_diagnostic_info_candidate_profile( diff --git a/prelude/apple/tools/defs.bzl b/prelude/apple/tools/defs.bzl index 03e8f4233f5..55f20fa3973 100644 --- a/prelude/apple/tools/defs.bzl +++ b/prelude/apple/tools/defs.bzl @@ -5,16 +5,15 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -load( - "@fbsource//tools/build_defs:python_platform.bzl", - "set_platform_decorator_for_python", -) +# @oss-disable: load("@fbsource//tools/build_defs:python_platform.bzl", "set_platform_decorator_for_python") load("@prelude//:native.bzl", _native = "native") +set_platform_decorator_for_python = lambda **kwargs: kwargs # @oss-enable + def meta_python_test(name, **kwargs): # Set the platform attributes as needed for proper exec platform resolution kwargs = set_platform_decorator_for_python( - set_python_constraint_overrides = True, + # @oss-disable: set_python_constraint_overrides = True, **kwargs ) diff --git a/prelude/apple/tools/framework_sanitizer.py b/prelude/apple/tools/framework_sanitizer.py new file mode 100644 index 00000000000..8a314dd1eb9 --- /dev/null +++ b/prelude/apple/tools/framework_sanitizer.py @@ -0,0 +1,72 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +# pyre-strict + +""" +Removes files from a framework that are not necessary when distributing +inside app bundles. 
Specifically Modules/*, Headers/*, and Documentation/*. + +Example Usage: +framework_sanitizer.py --input original/Foo.framework \ +--output output/Foo.framework +""" + +import argparse +import os +import re +import shutil + +from pathlib import Path +from typing import Callable, Iterable, Pattern + + +def _should_ignore( + framework_root: str, +) -> Callable[[str, list[str]], Iterable[str]]: + prohibited: list[str] = ["Modules", "Headers", "Documentation"] + + def _should_ignore_impl(root: str, contents: list[str]) -> Iterable[str]: + if re.sub(r"/Versions/[A-Z]", "", root) == framework_root: + return prohibited + return [] + + return _should_ignore_impl + + +def main() -> None: + parser = argparse.ArgumentParser( + description="Prepare a framework for distribution by removing unnecessary files." + ) + + parser.add_argument("--input", required=True) + parser.add_argument("--output", required=True) + parser.add_argument("--replacement-binary") + args = parser.parse_args() + + out_path = Path(args.output) + + shutil.copytree( + args.input, + out_path, + symlinks=True, + dirs_exist_ok=False, + ignore=_should_ignore(args.input), + ) + + if args.replacement_binary: + framework_name = os.path.splitext(os.path.basename(out_path))[0] + # Use realpath() because for macOS versioned bundles + # we may need to follow a symlink: + framework_binary_path = os.path.realpath(out_path / framework_name) + os.chmod(framework_binary_path, 0o644) + + shutil.copy(args.replacement_binary, framework_binary_path) + + +if __name__ == "__main__": + main() diff --git a/prelude/apple/tools/index/BUCK.v2 b/prelude/apple/tools/index/BUCK.v2 new file mode 100644 index 00000000000..83078b7209a --- /dev/null +++ b/prelude/apple/tools/index/BUCK.v2 @@ -0,0 +1,25 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") +load("@prelude//apple/tools/defs.bzl", "meta_python_test") + +source_listing() + +python_bootstrap_binary( + name = "merge_index_store", + main = "merge_index_store.py", + visibility = ["PUBLIC"], + deps = [":merge_index_store_lib"], +) + +python_bootstrap_library( + name = "merge_index_store_lib", + srcs = ["merge_index_store.py"], + tests = [":merge_index_store_tests"], +) + +meta_python_test( + name = "merge_index_store_tests", + srcs = [ + "merge_index_store.py", + "merge_index_store_tests.py", + ], +) diff --git a/prelude/apple/tools/index/merge_index_store.py b/prelude/apple/tools/index/merge_index_store.py new file mode 100644 index 00000000000..b690551cd18 --- /dev/null +++ b/prelude/apple/tools/index/merge_index_store.py @@ -0,0 +1,69 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree.
+ + # pyre-strict + + import argparse + import os + import subprocess + import sys + from argparse import Namespace + from concurrent.futures import as_completed, ThreadPoolExecutor + from pathlib import Path + + MAX_WORKERS = 8 + + + def parse_arguments() -> Namespace: + parser = argparse.ArgumentParser() + parser.add_argument("-d", "--dest", type=str, required=True) + parser.add_argument("-s", "--sources", nargs="+", type=str, required=True) + parser.add_argument("--dummy-output", type=str, required=False) + return parser.parse_args() + + + def merge_directories(source: str, destination: str) -> None: + if os.path.isdir(source): + print(f"Merging {source} to {destination}", file=sys.stderr) + if not source.endswith("/"): + source = source + "/" + # Use rsync to copy files from source to destination + # shutil.copytree would raise file-exists errors when merging in parallel + result = subprocess.run( + ["rsync", "-a", "--ignore-existing", source, destination], + stderr=subprocess.PIPE, + text=True, + ) + if result.returncode != 0: + raise Exception( + f"Failed to merge {source} to {destination}:\n\t{result.stderr}" + ) + else: + raise Exception(f"Directory {source} does not exist or is not a directory") + + + def main() -> None: + args = parse_arguments() + destination = args.dest + directories = args.sources + + Path(destination).mkdir(parents=True, exist_ok=True) + if args.dummy_output: + # For dummy output, create a file to avoid empty output for buck2 + Path(args.dummy_output).touch() + + with ThreadPoolExecutor(max_workers=MAX_WORKERS) as executor: + futures = [ + executor.submit(merge_directories, index_dir, destination) + for index_dir in directories + ] + for future in as_completed(futures): + future.result() # This will raise any exceptions that occurred + + + if __name__ == "__main__": + main() diff --git a/prelude/apple/tools/index/merge_index_store_tests.py b/prelude/apple/tools/index/merge_index_store_tests.py new file mode 100644 index 00000000000..bd52a61b373 --- /dev/null +++ b/prelude/apple/tools/index/merge_index_store_tests.py @@ -0,0 +1,60 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree.
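# Illustrative usage of the merger above (hypothetical paths), matching the
# flags defined in parse_arguments():
#   merge_index_store.py --dest merged/index-store --sources a/index-store b/index-store
# rsync's --ignore-existing keeps the parallel workers from failing on unit
# files that another worker has already copied into the destination.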
+ +# pyre-strict + +import subprocess +import unittest +from unittest.mock import MagicMock, patch + +from .merge_index_store import merge_directories, parse_arguments + + +class TestMergeIndexStore(unittest.TestCase): + def test_parse_arguments(self) -> None: + test_args = ["-d", "destination", "-s", "source1", "source2"] + with patch("sys.argv", ["script"] + test_args): + args = parse_arguments() + self.assertEqual(args.dest, "destination") + self.assertEqual(args.sources, ["source1", "source2"]) + + @patch("os.path.isdir") + @patch("subprocess.run") + def test_merge_directories( + self, mock_run: MagicMock, mock_isdir: MagicMock + ) -> None: + mock_isdir.return_value = True + mock_run.return_value = MagicMock(returncode=0, stderr="") + + merge_directories("source", "destination") + mock_run.assert_called_once_with( + ["rsync", "-a", "--ignore-existing", "source/", "destination"], + stderr=subprocess.PIPE, + text=True, + ) + + @patch("os.path.isdir") + @patch("subprocess.run") + def test_merge_directories_failure( + self, mock_run: MagicMock, mock_isdir: MagicMock + ) -> None: + mock_isdir.return_value = True + mock_run.return_value = MagicMock(returncode=1, stderr="Error") + + with self.assertRaises(Exception) as context: + merge_directories("source", "destination") + self.assertTrue("Failed to merge" in str(context.exception)) + + @patch("os.path.isdir") + def test_merge_non_existing_directory(self, mock_isdir: MagicMock) -> None: + mock_isdir.return_value = False + with self.assertRaises(Exception) as context: + merge_directories("source", "destination") + self.assertTrue( + "Directory source does not exist or is not a directory" + in str(context.exception) + ) diff --git a/prelude/apple/tools/info_plist_processor/BUCK.v2 b/prelude/apple/tools/info_plist_processor/BUCK.v2 index c40563c83a2..6e7c1a443c7 100644 --- a/prelude/apple/tools/info_plist_processor/BUCK.v2 +++ b/prelude/apple/tools/info_plist_processor/BUCK.v2 @@ -1,5 +1,10 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") load("@prelude//apple/tools/defs.bzl", "meta_python_test") +oncall("build_infra") + +source_listing() + python_library( name = "preprocess", srcs = ["preprocess.py"], @@ -14,8 +19,8 @@ meta_python_test( python_library( name = "process", srcs = ["process.py"], - deps = ["prelude//apple/tools:plistlib_utils"], visibility = ["PUBLIC"], + deps = ["prelude//apple/tools:plistlib_utils"], ) meta_python_test( @@ -27,9 +32,9 @@ meta_python_test( python_binary( name = "tool", main = "main.py", + visibility = ["PUBLIC"], deps = [ ":preprocess", ":process", ], - visibility = ["PUBLIC"], ) diff --git a/prelude/apple/tools/make_modulemap.py b/prelude/apple/tools/make_modulemap.py index f92f037df34..d863a57965f 100755 --- a/prelude/apple/tools/make_modulemap.py +++ b/prelude/apple/tools/make_modulemap.py @@ -10,7 +10,29 @@ import os import re from io import TextIOWrapper -from typing import Dict, Iterable, List +from typing import Dict, FrozenSet, Iterable, List + + +_RESERVED_KEYWORDS: FrozenSet[str] = frozenset( + [ + "config_macros", + "conflict", + "exclude", + "explicit", + "extern", + "export_as", + "export", + "framework", + "header", + "link", + "module", + "private", + "requires", + "textual", + "umbrella", + "use", + ] +) class Module: @@ -30,7 +52,10 @@ def get_submodule(self, name: str) -> "Module": def render(self, f: TextIOWrapper, path_prefix: str, indent: int = 0) -> None: space = " " * indent - f.write(f"{space}module {self.name} {{\n") + name = self.name + if name in _RESERVED_KEYWORDS: + 
name = f"{name}_" + f.write(f"{space}module {name} {{\n") submodule_names = set() for submodule_name in sorted(self.submodules.keys()): @@ -81,7 +106,7 @@ def _write_submodules( module = root_module for i, component in enumerate(h.split(os.sep)): if i == 0 and component == name: - # The common case is we have a singe header path prefix that matches the module name. + # The common case is we have a single header path prefix that matches the module name. # In this case we add the headers directly to the root module. pass else: diff --git a/prelude/apple/tools/re_compatibility_utils/BUCK b/prelude/apple/tools/re_compatibility_utils/BUCK index 95fd775126d..a567b6984b5 100644 --- a/prelude/apple/tools/re_compatibility_utils/BUCK +++ b/prelude/apple/tools/re_compatibility_utils/BUCK @@ -1,5 +1,9 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + oncall("build_infra") +source_listing() + prelude = native prelude.python_library( diff --git a/prelude/apple/tools/re_compatibility_utils/writable.py b/prelude/apple/tools/re_compatibility_utils/writable.py index b6c0ee90d10..af853b566eb 100644 --- a/prelude/apple/tools/re_compatibility_utils/writable.py +++ b/prelude/apple/tools/re_compatibility_utils/writable.py @@ -8,8 +8,10 @@ # pyre-strict import os +import pathlib import platform import stat +import sys def make_path_user_writable(path: str) -> None: @@ -25,7 +27,23 @@ def make_path_user_writable(path: str) -> None: # Darwin supports permission setting on symlinks. follow_symlinks = platform.system() != "Darwin" st = os.stat(path, follow_symlinks=False) - os.chmod(path, st.st_mode | stat.S_IWUSR, follow_symlinks=follow_symlinks) + + try: + os.chmod(path, st.st_mode | stat.S_IWUSR, follow_symlinks=follow_symlinks) + except FileNotFoundError as e: + path_obj = pathlib.Path(path) + if path_obj.is_symlink(): + resolved_path_obj = path_obj.resolve() + if not resolved_path_obj.exists(): + # On Linux systems, all symlinks are followed when `chmod`-ing + # (see comment above about `AT_SYMLINK_NOFOLLOW`). If that happens, + # we can ignore the `chmod` error as it's harmless. + print( + f"Tried setting permission on a symlink to a non-existing path, ignoring error...
{e}", + file=sys.stderr, + ) + return + raise e def make_dir_recursively_writable(dir: str) -> None: diff --git a/prelude/apple/tools/resource_broker/BUCK.v2 b/prelude/apple/tools/resource_broker/BUCK.v2 index 5aef86d064f..0a3e34a211a 100644 --- a/prelude/apple/tools/resource_broker/BUCK.v2 +++ b/prelude/apple/tools/resource_broker/BUCK.v2 @@ -1,3 +1,9 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + python_binary( name = "resource_broker", main = "main.py", diff --git a/prelude/apple/tools/resource_broker/idb_target.py b/prelude/apple/tools/resource_broker/idb_target.py index bfa7630d677..f856bd75b93 100644 --- a/prelude/apple/tools/resource_broker/idb_target.py +++ b/prelude/apple/tools/resource_broker/idb_target.py @@ -31,6 +31,12 @@ class IdbTarget: port: int = 0 +@dataclass +class SimulatorInfo: + udid: str + device_set_path: str + + def managed_simulators_from_stdout(stdout: Optional[str]) -> List[IdbTarget]: if not stdout: return [] diff --git a/prelude/apple/tools/resource_broker/ios.py b/prelude/apple/tools/resource_broker/ios.py index 12bdcf841b5..6bf81029577 100644 --- a/prelude/apple/tools/resource_broker/ios.py +++ b/prelude/apple/tools/resource_broker/ios.py @@ -14,7 +14,12 @@ from .idb_companion import IdbCompanion -from .idb_target import IdbTarget, managed_simulators_from_stdout, SimState +from .idb_target import ( + IdbTarget, + managed_simulators_from_stdout, + SimState, + SimulatorInfo, +) from .simctl_runtime import list_ios_runtimes, XCSimRuntime @@ -71,7 +76,11 @@ def _compatible_device_type_from_runtime(runtime: XCSimRuntime) -> Optional[str] return None default = next(iphones) return next( - (device_type.name for device_type in iphones if device_type.name == "iPhone 8"), + ( + device_type.name + for device_type in iphones + if device_type.name == "iPhone 11" + ), default.name, ) @@ -171,7 +180,7 @@ def _select_simulator_with_preference( return simulator -async def _ios_simulator(simulator_manager: str, booted: bool) -> List[IdbCompanion]: +async def prepare_simulator(simulator_manager: str, booted: bool) -> SimulatorInfo: managed_simulators = await _get_managed_simulators_create_if_needed( simulator_manager=simulator_manager ) @@ -187,7 +196,16 @@ async def _ios_simulator(simulator_manager: str, booted: bool) -> List[IdbCompan cmd=boot_cmd, timeout=SIMULATOR_BOOT_TIMEOUT, ) + return SimulatorInfo( + udid=simulator.udid, + device_set_path=_device_set_path(), + ) + +async def _ios_simulator(simulator_manager: str, booted: bool) -> List[IdbCompanion]: + simulator = await prepare_simulator( + simulator_manager=simulator_manager, booted=booted + ) grpc_domain_sock = f"/tmp/buck2_idb_companion_{simulator.udid}" process = await spawn_companion( command=_spawn_companion_for_simulator_command( diff --git a/prelude/apple/tools/resource_broker/main.py b/prelude/apple/tools/resource_broker/main.py index 15a23ba91c9..e6a422e5413 100644 --- a/prelude/apple/tools/resource_broker/main.py +++ b/prelude/apple/tools/resource_broker/main.py @@ -19,7 +19,7 @@ from .idb_companion import IdbCompanion -from .ios import ios_booted_simulator, ios_unbooted_simulator +from .ios import ios_booted_simulator, ios_unbooted_simulator, prepare_simulator from .macos import macos_idb_companions @@ -49,6 +49,14 @@ def _args_parser() -> argparse.ArgumentParser: Pass `{_ResourceType.macosIdbCompanion}` to get MacOS companions.
""", ) + parser.add_argument( + "--no-companion", + default=False, + action="store_true", + help=""" + If passed, will only create a simulator. No idb_companion will be spawned. + """, + ) return parser @@ -71,6 +79,30 @@ def _check_simulator_manager_exists(simulator_manager: Optional[str]) -> None: def main() -> None: args = _args_parser().parse_args() + if args.no_companion: + if args.type == _ResourceType.macosIdbCompanion: + raise Exception( + "No resource broker is required for MacOS tests without companion" + ) + + booted = args.type == _ResourceType.iosBootedSimulator + sim = asyncio.run( + prepare_simulator(simulator_manager=args.simulator_manager, booted=booted) + ) + result = { + "resources": [ + { + "udid": sim.udid, + "device_set_path": sim.device_set_path, + } + ] + } + json.dump(result, sys.stdout) + else: + _create_companion(args) + + + def _create_companion(args: argparse.Namespace) -> None: if args.type == _ResourceType.iosBootedSimulator: _check_simulator_manager_exists(args.simulator_manager) idb_companions.extend(asyncio.run(ios_booted_simulator(args.simulator_manager))) diff --git a/prelude/apple/tools/selective_debugging/BUCK.v2 b/prelude/apple/tools/selective_debugging/BUCK.v2 index 123760240da..bbf9b08f592 100644 --- a/prelude/apple/tools/selective_debugging/BUCK.v2 +++ b/prelude/apple/tools/selective_debugging/BUCK.v2 @@ -1,5 +1,10 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") load("@prelude//apple/tools/defs.bzl", "meta_python_test") +oncall("build_infra") + +source_listing() + python_library( name = "lib", srcs = [ diff --git a/prelude/apple/tools/selective_debugging/main.py b/prelude/apple/tools/selective_debugging/main.py index d1dc98de329..c9cfd835d99 100644 --- a/prelude/apple/tools/selective_debugging/main.py +++ b/prelude/apple/tools/selective_debugging/main.py @@ -37,6 +37,10 @@ def _parse_args() -> argparse.Namespace: "--adhoc-codesign-tool", help="An adhoc codesign tool to use to re-sign the executables/dylibs, if provided.", ) + parser.add_argument( + "--persisted-targets-file", + help="A JSON file with additional targets that must be preserved by the scrubber.", + ) return parser.parse_args() @@ -46,6 +50,7 @@ def main() -> None: scrub( input_file=args.input, output_file=args.output, + persisted_targets_file=args.persisted_targets_file, targets_file=args.targets_file, spec_file=args.spec_file, adhoc_codesign_tool=args.adhoc_codesign_tool, diff --git a/prelude/apple/tools/selective_debugging/scrubber.py b/prelude/apple/tools/selective_debugging/scrubber.py index a3f75ab166a..791920b892b 100644 --- a/prelude/apple/tools/selective_debugging/scrubber.py +++ b/prelude/apple/tools/selective_debugging/scrubber.py @@ -110,16 +110,22 @@ def should_scrub_with_focused_targets_output_paths( return True -def _should_scrub_with_targets_file(json_file_path: str) -> Callable[[str], bool]: +def _should_scrub_with_targets_file( + json_file_path: str, additional_labels: Set[str] +) -> Callable[[str], bool]: focused_targets_output_paths = load_focused_targets_output_paths(json_file_path) return lambda debug_file_path: should_scrub_with_focused_targets_output_paths( - focused_targets_output_paths, debug_file_path + focused_targets_output_paths.union(additional_labels), debug_file_path ) -def _should_scrub_with_spec_file(json_file_path: str) -> Callable[[str], bool]: +def _should_scrub_with_spec_file( + json_file_path: str, additional_labels: Set[str] +) -> Callable[[str], bool]: spec = Spec(json_file_path) - return spec.scrub_debug_file_path + return
lambda debug_file_path: should_scrub_with_focused_targets_output_paths( + additional_labels, debug_file_path + ) and spec.scrub_debug_file_path(debug_file_path) def _scrub( @@ -163,18 +169,20 @@ def _scrub( def scrub( input_file: str, output_file: str, + persisted_targets_file: str, targets_file: Optional[str] = None, spec_file: Optional[str] = None, adhoc_codesign_tool: Optional[str] = None, ) -> List[Tuple[str, str]]: + additional_labels = load_focused_targets_output_paths(persisted_targets_file) if targets_file and spec_file: raise Exception( "Only one of a targets file or spec file is supported, not both!" ) elif targets_file: - scrub_handler = _should_scrub_with_targets_file(targets_file) + scrub_handler = _should_scrub_with_targets_file(targets_file, additional_labels) elif spec_file: - scrub_handler = _should_scrub_with_spec_file(spec_file) + scrub_handler = _should_scrub_with_spec_file(spec_file, additional_labels) else: scrub_handler = _always_scrub diff --git a/prelude/apple/tools/selective_debugging/scrubber_test.py b/prelude/apple/tools/selective_debugging/scrubber_test.py index dd648fcd5bf..fe3c92a5c07 100644 --- a/prelude/apple/tools/selective_debugging/scrubber_test.py +++ b/prelude/apple/tools/selective_debugging/scrubber_test.py @@ -198,6 +198,7 @@ def _get_scrubber_results( scrub( str(test_binary_file), out_file.name, + None, targets_file=str(targets_json_file), adhoc_codesign_tool=adhoc_codesign_tool, ), @@ -211,13 +212,14 @@ def _get_scrubber_results( scrub( str(test_binary_file), out_file.name, + None, spec_file=str(spec_json_file), adhoc_codesign_tool=adhoc_codesign_tool, ), out_file.name, ) else: - return scrub(str(test_binary_file), out_file.name), out_file.name + return scrub(str(test_binary_file), out_file.name, None), out_file.name def _get_test_resource_file(name) -> pathlib.Path: diff --git a/prelude/apple/tools/swift_exec.py b/prelude/apple/tools/swift_exec.py new file mode 100755 index 00000000000..be07f6726fd --- /dev/null +++ b/prelude/apple/tools/swift_exec.py @@ -0,0 +1,82 @@ +#!/usr/bin/env python3 +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +import os +import subprocess +import sys + +_RE_TMPDIR_ENV_VAR = "TMPDIR" +_FILE_WRITE_FAILURE_MARKER = "could not write" + + +def main(): + env = os.environ.copy() + if "INSIDE_RE_WORKER" in env and _RE_TMPDIR_ENV_VAR in env: + # Use $TMPDIR for the module cache location. This + # will be set to a unique location for each RE action + # which will avoid sharing modules across RE actions. + # This is necessary as the inputs to the modules will + # be transient and can be removed at any point, causing + # module validation errors to fail builds. + # https://github.com/llvm/llvm-project/blob/main/clang/lib/Driver/ToolChains/Clang.cpp#L3709 + env["CLANG_MODULE_CACHE_PATH"] = os.path.join( + env[_RE_TMPDIR_ENV_VAR], "buck-module-cache" + ) + else: + # For local actions use a shared module cache location. + # This should be safe to share across the other local + # compilation actions. + env["CLANG_MODULE_CACHE_PATH"] = "/tmp/buck-module-cache" + + command = sys.argv[1:] + # Apply a debug prefix map for the current directory + # to make debug info relocatable. 
To correctly make paths + # relocatable, we must use that path at which the action + # is run (be it locally or on RE) and this is not known + # at the time of action definition. + command += [ + "-debug-prefix-map", + f"{os.getcwd()}/=", + ] + # Apply a coverage prefix map for the current directory + # to make file path metadata relocatable stripping + # the current directory from it. + command += [ + "-coverage-prefix-map", + f"{os.getcwd()}=.", + ] + + result = subprocess.run( + command, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + encoding=sys.stdout.encoding, + env=env, + ) + + print(result.stdout, file=sys.stdout, end="") + print(result.stderr, file=sys.stderr, end="") + + if result.returncode == 0: + # The Swift compiler will return an exit code of 0 and warn when it cannot write auxiliary files. + # Detect and error so that the action is not cached. + failed_write = ( + _FILE_WRITE_FAILURE_MARKER in result.stdout + or _FILE_WRITE_FAILURE_MARKER in result.stderr + ) + if failed_write: + print( + "Detected Swift compiler file write error but compiler exited with code 0, failing command..." + ) + sys.exit(1) + + sys.exit(result.returncode) + + +if __name__ == "__main__": + main() diff --git a/prelude/apple/tools/swift_exec.sh b/prelude/apple/tools/swift_exec.sh deleted file mode 100755 index 287f7c278b1..00000000000 --- a/prelude/apple/tools/swift_exec.sh +++ /dev/null @@ -1,32 +0,0 @@ -#!/usr/bin/env bash -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under both the MIT license found in the -# LICENSE-MIT file in the root directory of this source tree and the Apache -# License, Version 2.0 found in the LICENSE-APACHE file in the root directory -# of this source tree. - -set -e - -if [ -n "$INSIDE_RE_WORKER" ]; then - # Use $TMPDIR for the module cache location. This - # will be set to a unique location for each RE action - # which will avoid sharing modules across RE actions. - # This is necessary as the inputs to the modules will - # be transient and can be removed at any point, causing - # module validation errors to fail builds. - # https://github.com/llvm/llvm-project/blob/main/clang/lib/Driver/ToolChains/Clang.cpp#L3709 - export CLANG_MODULE_CACHE_PATH="$TMPDIR/buck-module-cache" -else - # For local actions use a shared module cache location. - # This should be safe to share across the other local - # compilation actions. - export CLANG_MODULE_CACHE_PATH="/tmp/buck-module-cache" -fi - -# Apply a debug prefix map for the current directory -# to make debug info relocatable. To correctly make paths -# relocatable, we must use that path at which the action -# is run (be it locally or on RE) and this is not known -# at the time of action definition. -exec "$@" -debug-prefix-map "$PWD"/= diff --git a/prelude/apple/tools/swift_objc_header_postprocess.py b/prelude/apple/tools/swift_objc_header_postprocess.py deleted file mode 100755 index f3ccfc5fcb0..00000000000 --- a/prelude/apple/tools/swift_objc_header_postprocess.py +++ /dev/null @@ -1,312 +0,0 @@ -#!/usr/bin/env fbpython -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under both the MIT license found in the -# LICENSE-MIT file in the root directory of this source tree and the Apache -# License, Version 2.0 found in the LICENSE-APACHE file in the root directory -# of this source tree. - -import argparse -import json -import os -import re -import sys -from typing import Dict, Iterable, TextIO - -# Out-of-date? 
Update with this command: -# -# xcode-select --print-path | xargs printf '%s/Platforms/iPhoneSimulator.platform/Developer/SDKs/iPhoneSimulator.sdk/System/Library/Frameworks/' | xargs ls | rg '^([A-Z].+)\.framework$' -r '${1}' | xargs printf ' "%s",\n' && xcode-select --print-path | xargs printf '%s/Platforms/iPhoneSimulator.platform/Developer/SDKs/iPhoneSimulator.sdk/usr/include/module.modulemap' | xargs cat | rg '^module ([a-zA-Z0-9_]*) .*$' -r '${1}'| xargs printf ' "%s",\n' -APPLE_SYSTEM_MODULES = { - "ARKit", - "AVFAudio", - "AVFoundation", - "AVKit", - "Accelerate", - "Accessibility", - "Accounts", - "AdServices", - "AdSupport", - "AddressBook", - "AddressBookUI", - "AppClip", - "AppTrackingTransparency", - "AssetsLibrary", - "AudioToolbox", - "AudioUnit", - "AuthenticationServices", - "AutomaticAssessmentConfiguration", - "BackgroundTasks", - "BusinessChat", - "CFNetwork", - "CallKit", - "CarPlay", - "ClassKit", - "ClockKit", - "CloudKit", - "Combine", - "Contacts", - "ContactsUI", - "CoreAudio", - "CoreAudioKit", - "CoreAudioTypes", - "CoreBluetooth", - "CoreData", - "CoreFoundation", - "CoreGraphics", - "CoreHaptics", - "CoreImage", - "CoreLocation", - "CoreLocationUI", - "CoreMIDI", - "CoreML", - "CoreMedia", - "CoreMotion", - "CoreNFC", - "CoreServices", - "CoreSpotlight", - "CoreTelephony", - "CoreText", - "CoreVideo", - "CryptoKit", - "CryptoTokenKit", - "DataDetection", - "DeveloperToolsSupport", - "DeviceActivity", - "DeviceCheck", - "EventKit", - "EventKitUI", - "ExposureNotification", - "ExternalAccessory", - "FamilyControls", - "FileProvider", - "FileProviderUI", - "Foundation", - "GLKit", - "GSS", - "GameController", - "GameKit", - "GameplayKit", - "GroupActivities", - "HealthKit", - "HealthKitUI", - "HomeKit", - "IOKit", - "IOSurface", - "IdentityLookup", - "IdentityLookupUI", - "ImageCaptureCore", - "ImageIO", - "Intents", - "IntentsUI", - "JavaScriptCore", - "LinkPresentation", - "LocalAuthentication", - "ManagedSettings", - "ManagedSettingsUI", - "MapKit", - "MediaAccessibility", - "MediaPlayer", - "MediaToolbox", - "MessageUI", - "Messages", - "Metal", - "MetalKit", - "MetalPerformanceShaders", - "MetalPerformanceShadersGraph", - "MetricKit", - "MobileCoreServices", - "ModelIO", - "MultipeerConnectivity", - "MusicKit", - "NaturalLanguage", - "NearbyInteraction", - "Network", - "NetworkExtension", - "NewsstandKit", - "NotificationCenter", - "OSLog", - "OpenAL", - "OpenGLES", - "PDFKit", - "PHASE", - "PassKit", - "PencilKit", - "Photos", - "PhotosUI", - "PushKit", - "QuartzCore", - "QuickLook", - "QuickLookThumbnailing", - "RealityFoundation", - "RealityKit", - "ReplayKit", - "SafariServices", - "SceneKit", - "ScreenTime", - "Security", - "SensorKit", - "ShazamKit", - "Social", - "SoundAnalysis", - "Speech", - "SpriteKit", - "StoreKit", - "SwiftUI", - "SystemConfiguration", - "TabularData", - "Twitter", - "UIKit", - "UniformTypeIdentifiers", - "UserNotifications", - "UserNotificationsUI", - "VideoSubscriberAccount", - "VideoToolbox", - "Vision", - "VisionKit", - "WatchConnectivity", - "WebKit", - "WidgetKit", - "AppleTextureEncoder", - "Compression", - "Darwin", - "asl", - "dnssd", - "os", - "os_object", - "os_workgroup", - "libkern", - "notify", - "zlib", - "SQLite3", -} - -APPLE_TEST_FRAMEWORKS = { - "XCTest", -} - - -# These modules require specific handling, as they do not have an umbrella -# header that matches the module name, as typical Apple frameworks do. 
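For reference on the behavior being deleted here: the override table below maps a module name to a header prefix plus an explicit header list, while every other module falls back to the `<Module>/<Module>.h` umbrella pattern. A minimal self-contained sketch of that lookup (not part of the tool; `import_lines` is a hypothetical name mirroring what `write_imports_for_modules` does with the table):

    # Sketch only: mirrors the removed postprocessor's module-to-#import
    # resolution. `overrides` has the same {name: (prefix, headers)} shape
    # as APPLE_SYSTEM_MODULE_OVERRIDES below.
    def import_lines(module: str, overrides: dict) -> list:
        if module in overrides:
            prefix, headers = overrides[module]
            return [f"#import <{prefix}/{header}>" for header in headers]
        # Default umbrella-header rule used for typical Apple frameworks.
        return [f"#import <{module}/{module}.h>"]

    assert import_lines("Dispatch", {"Dispatch": ("dispatch", ("dispatch.h",))}) == [
        "#import <dispatch/dispatch.h>"
    ]
    assert import_lines("Foundation", {}) == ["#import <Foundation/Foundation.h>"]
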
-APPLE_SYSTEM_MODULE_OVERRIDES = { - "Dispatch": ("dispatch", ("dispatch.h",)), - "ObjectiveC": ("objc", ("runtime.h",)), -} - - -def write_imports_for_headers(out: TextIO, prefix: str, headers: Iterable[str]) -> None: - for header in headers: - print(f"#import <{prefix}/{header}>", file=out) - - -def write_imports_for_modules( - out: TextIO, - postprocessing_module_name: str, - modules: Iterable[str], - deps: Dict[str, Iterable[str]], -) -> None: - # We only include the traditional textual imports when modules are disabled, so - # that the behavior with modules enabled is identical to the behavior without - # the postprocessing. - print("#else", file=out) - for module in modules: - if headers := deps.get(module): - write_imports_for_headers(out, module, headers) - elif override := APPLE_SYSTEM_MODULE_OVERRIDES.get(module): - write_imports_for_headers(out, override[0], override[1]) - elif module in APPLE_SYSTEM_MODULES or module in APPLE_TEST_FRAMEWORKS: - # When we don't have an explicit override for the module, we use the module's - # name as an umbrella header. This is used for typical Apple frameworks like - # Foundation and UIKit. - write_imports_for_headers(out, module, (f"{module}.h",)) - else: - print( - f""" -The module "{module}" was imported as a dependency of Swift code in "{postprocessing_module_name}", but could not be mapped to a list of header imports by Buck's Swift header postprocessing. There are two possibilities: - -1. If "{module}" is an internal library, it is likely that the exported_deps of "{postprocessing_module_name}" are incorrect. Try fixing them manually or with "arc fixmydeps". This is the most likely issue. - -2. If "{module}" is a system (Apple) framework, the list of Apple system modules in {os.path.basename(__file__)} is out-of-date. There is a command to fix it in that file. This issue is unlikely. -""", - file=sys.stderr, - ) - sys.exit(1) - - -def main() -> None: - parser = argparse.ArgumentParser() - parser.add_argument("header") - parser.add_argument("deps") - parser.add_argument("out") - args = parser.parse_args() - - with open(args.deps) as f: - deps = json.load(f) - - # Strips the suffix from the header name, leaving us with just the name - # of the module that we are postprocessing the header for. This is used - # for error reporting. - postprocessing_module_name = os.path.basename(args.header).split("-")[0] - - # The Swift compiler's output looks like this for Swift5.8: - # - # #if __has_feature(objc_modules) - # #if __has_warning("-Watimport-in-framework-header") - # #pragma clang diagnostic ignored "-Watimport-in-framework-header" - # #endif - # @import ModuleA; - # @import ModuleB; - # @import ModuleC; - # #endif - # - # The implementation here balances being somewhat flexible to changes to the compiler's - # output, unlikely though they may be, with avoiding adding too much complexity and getting - # too close to implementing a full parser for Objective-C un-preprocessed header files. - - with open(args.header) as header, open(args.out, "w") as out: - # When this is None, it means that we are still searching for the start of the conditional - # @import block in the generated header. - modules = None - # The Swift compiler emits an additional #if gate inside the conditional @import block, so - # we need to track whether we're in a further nested conditional so that we know when the - # main conditional block has ended. 
- if_level = 0 - - for line in header: - line = line.rstrip("\n") - # When the modules has not been set, we are still searching for the start of the - # modules @import section. - if modules is None: - # The line changed from __has_feature(modules) to __has_feature(objc_modules) between Swift5.7 and Swift5.8. - # For the time being, we need to check for either to support both Xcode14.2 and Xcode14.3 onwards. - if ( - line == "#if __has_feature(objc_modules)" - or line == "#if __has_feature(modules)" - ): - modules = [] - if_level = 1 - else: - if line.startswith("@import"): - # Splitting on: - # "@import ": to separate from the @import. - # Semicolon and period: to separate the main module name from submodules or EOL. - # The module name will then be the first item. - modules.append(re.split(r"@import |[;.]", line)[1]) - elif line.startswith("#if"): - # This allows us to handle the Clang diagnostic #if block that the compiler inserts - # within the main #if block for modules. - if_level += 1 - elif line.startswith("#endif"): - if_level -= 1 - if if_level == 0: - write_imports_for_modules( - out, - postprocessing_module_name, - modules, - deps, - ) - modules = None - print(line, file=out) - - -if __name__ == "__main__": - main() diff --git a/prelude/apple/tools/xcframework_maker.py b/prelude/apple/tools/xcframework_maker.py new file mode 100644 index 00000000000..17806db0e5e --- /dev/null +++ b/prelude/apple/tools/xcframework_maker.py @@ -0,0 +1,163 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +# pyre-strict + +""" +Packages given input files into the correct format for an XCFramework, and generates +the required Info.plist. + +Example Usage: +xcframework_maker.py --name FooKit --output-path /tmp/FooKit.xcframework \ + --framework-path ios-arm64 input/ios/FooKit.xcframework \ + --dsym-path ios-arm64 input/ios/dSYM \ + --framework-path ios-arm64_x86_64-simulator input/ios-simulator/FooKit.xcframework \ + --dsym-path ios-arm64_x86_64-simulator input/ios-simulator/dSYM +""" + +import argparse +import plistlib +import shutil + +from pathlib import Path +from typing import Any, Optional + +# Functions that take architecture specifiers as 'item'. 
+# Examples: +# ios-arm64_x86_64-simulator +# -> supported platform: ios +# -> supported architectures [arm64, x86_64] +# -> supported platform variant: simulator +# watchos-arm64_arm64_32 +# -> supported platform: watchos +# -> supported architectures: [arm64, arm64_32] +# -> supported platform variant: None + + +def _supported_architectures(item: str) -> list[str]: + archs = [] + # order is important so that we can + # consume 'arm64_32' first to prevent it + # from later matching arm64 + for arch in ["arm64_32", "arm64", "x86_64"]: + if arch in item: + archs.append(arch) + item = item.replace(arch, "") + return archs + + +def _supported_platform(item: str) -> str: + return item.split("-")[0] + + +def _supported_platform_variant(item: str) -> Optional[str]: + components = item.split("-") + if len(components) > 2: + return components[2] + else: + return None + + +def _make_plist_entry( + item: str, binary_path: str, library_path: str, dsym_path: Optional[str] +) -> dict[str, Any]: + entry = { + "BinaryPath": binary_path, + "LibraryIdentifier": item, + "LibraryPath": library_path, + "SupportedArchitectures": _supported_architectures(item), + "SupportedPlatform": _supported_platform(item), + } + variant = _supported_platform_variant(item) + if variant is not None: + entry["SupportedPlatformVariant"] = variant + + if dsym_path is not None: + entry["DebugSymbolsPath"] = dsym_path + + return entry + + +def _make_plist( + items: list[str], + binary_paths: list[str], + library_path: str, + dsym_paths: list[Optional[str]], +) -> bytes: + d = {} + d["AvailableLibraries"] = [ + _make_plist_entry(item, binary_path, library_path, dsym_path) + for (item, binary_path, dsym_path) in zip(items, binary_paths, dsym_paths) + ] + d["CFBundlePackageType"] = "XFWK" + d["XCFrameworkFormatVersion"] = "1.0" + return plistlib.dumps(d) + + +def _find_binary_path(framework_fullpath: str, binary_name: str) -> str: + fullpath = Path(framework_fullpath) + versioned_binary_paths = sorted(fullpath.glob("Versions/Current/" + binary_name)) + if len(versioned_binary_paths) > 0: + return versioned_binary_paths[-1].relative_to(fullpath.parents[0]).as_posix() + return fullpath.name + "/" + binary_name + + +def main() -> None: + parser = argparse.ArgumentParser(description="Tool to make an xcframework bundle.") + parser.add_argument("--output-path") + parser.add_argument("--name") + parser.add_argument("--framework-path", action="append", nargs="+") + parser.add_argument( + "--dsym-path", action="append", nargs="+", default=[], required=False + ) + args = parser.parse_args() + + out_path = Path(args.output_path) + out_path.mkdir(parents=True, exist_ok=False) + + plist_path = out_path / "Info.plist" + items = [fp_args[0] for fp_args in args.framework_path] + binary_paths = [] + dsym_path_map = {} + + for framework_path in args.framework_path: + + # args are structured like this + # --framework-path ios-arm64 buck-out/path/to/MyPkg.framework + + framework_arch = framework_path[0] + framework_fullpath = framework_path[1] + framework_basename = Path(framework_fullpath).name + + shutil.copytree( + framework_fullpath, + out_path / framework_arch / framework_basename, + symlinks=True, + dirs_exist_ok=False, + ) + + binary_paths.append(_find_binary_path(framework_fullpath, args.name)) + + for dsym_path in args.dsym_path: + dsym_arch = dsym_path[0] + dsym_fullpath = dsym_path[1] + shutil.copytree( + dsym_fullpath, + out_path / dsym_arch / "dSYMs" / (args.name + ".framework.dSYM"), + symlinks=True, + dirs_exist_ok=False, + ) + 
dsym_path_map[dsym_arch] = "dSYMs" + + dsym_paths = [dsym_path_map.get(arch) for arch in items] + + library_path = args.name + ".framework" + plist_path.write_bytes(_make_plist(items, binary_paths, library_path, dsym_paths)) + + +if __name__ == "__main__": + main() diff --git a/prelude/apple/user/apple_ipa_package.bzl b/prelude/apple/user/apple_ipa_package.bzl new file mode 100644 index 00000000000..e6995536b7c --- /dev/null +++ b/prelude/apple/user/apple_ipa_package.bzl @@ -0,0 +1,158 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load("@prelude//:paths.bzl", "paths") +load("@prelude//apple:apple_bundle_destination.bzl", "AppleBundleDestination", "bundle_relative_path_for_destination") +load("@prelude//apple:apple_bundle_types.bzl", "AppleBundleInfo", "ApplePackageExtension") +load("@prelude//apple:apple_package_config.bzl", "IpaCompressionLevel") +load("@prelude//apple:apple_rules_impl_utility.bzl", "get_apple_bundle_toolchain_attr") +load("@prelude//apple:apple_sdk.bzl", "get_apple_sdk_name") +load("@prelude//apple:apple_swift_stdlib.bzl", "should_copy_swift_stdlib") +load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo", "AppleToolsInfo") +load("@prelude//user:rule_spec.bzl", "RuleRegistrationSpec") +load("@prelude//utils:arglike.bzl", "ArgLike") + +def _apple_ipa_package_impl(ctx: AnalysisContext) -> list[Provider]: + ipa_package = _get_ipa_contents(ctx) + return [DefaultInfo(default_output = ipa_package)] + +def _apple_ipa_package_attribs(): + attribs = { + "bundle": attrs.dep(providers = [AppleBundleInfo]), + "ext": attrs.enum(ApplePackageExtension.values(), default = "ipa"), + "labels": attrs.list(attrs.string(), default = []), + "package_name": attrs.option(attrs.string(), default = None), + "_apple_toolchain": get_apple_bundle_toolchain_attr(), + "_apple_tools": attrs.exec_dep(default = "prelude//apple/tools:apple-tools", providers = [AppleToolsInfo]), + "_ipa_compression_level": attrs.enum(IpaCompressionLevel.values()), + } + return attribs + +registration_spec = RuleRegistrationSpec( + name = "apple_ipa_package", + impl = _apple_ipa_package_impl, + attrs = _apple_ipa_package_attribs(), +) + +_IPA_PACKAGE_FORWARDED_FIELDS = [ + "bundle", + "ext", + "package_name", + "_ipa_compression_level", + "compatible_with", + "exec_compatible_with", + "target_compatible_with", + "default_target_platform", + "within_view", + "visibility", +] + +def make_apple_ipa_package_target(apple_ipa_package_rule, **kwargs) -> [None, str]: + ipa_package_kwargs = { + "labels": ["generated"], + } + for field_name in _IPA_PACKAGE_FORWARDED_FIELDS: + ipa_package_kwargs[field_name] = kwargs.get(field_name) + + ipa_package_target_name = kwargs["name"] + "__IPA_Package_Private" + apple_ipa_package_rule( + name = ipa_package_target_name, + **ipa_package_kwargs + ) + + return ":{}".format(ipa_package_target_name) + +def _get_ipa_contents(ctx: AnalysisContext) -> Artifact: + bundle = ctx.attrs.bundle + app = bundle[DefaultInfo].default_outputs[0] + + contents = { + paths.join("Payload", app.basename): app, + } + + apple_bundle_info = bundle[AppleBundleInfo] + if (not apple_bundle_info.skip_copying_swift_stdlib) and should_copy_swift_stdlib(app.extension): + swift_support_path = paths.join("SwiftSupport", get_apple_sdk_name(ctx)) + 
contents[swift_support_path] = _get_swift_support_dir(ctx, app, apple_bundle_info) + + if apple_bundle_info.contains_watchapp: + contents["Symbols"] = _build_symbols_dir(ctx) + + return ctx.actions.copied_dir( + "__unzipped_ipa_contents__", + contents, + ) + +def _build_symbols_dir(ctx) -> Artifact: + symbols_dir = ctx.actions.declare_output("__symbols__", dir = True) + ctx.actions.run( + cmd_args(["mkdir", "-p", symbols_dir.as_output()]), + category = "watchos_symbols_dir", + ) + + return symbols_dir + +def _get_swift_support_dir(ctx, bundle_output: Artifact, bundle_info: AppleBundleInfo) -> Artifact: + stdlib_tool = ctx.attrs._apple_toolchain[AppleToolchainInfo].swift_toolchain_info.swift_stdlib_tool + sdk_name = get_apple_sdk_name(ctx) + + # .app -> app + # This is the way the input is expected. + extension = bundle_output.extension[1:] + swift_support_dir = ctx.actions.declare_output("__swift_dylibs__", dir = True) + script, _ = ctx.actions.write( + "build_swift_support.sh", + [ + cmd_args("set -euo pipefail"), + cmd_args(swift_support_dir, format = "mkdir -p {}"), + cmd_args( + [ + stdlib_tool, + # If you're debugging, you can pass the '--verbose' flag here. + "--copy", + "--scan-executable", + cmd_args( + [ + bundle_output, + bundle_relative_path_for_destination(AppleBundleDestination("executables"), sdk_name, extension, False), + bundle_info.binary_name, + ], + delimiter = "/", + ), + _get_scan_folder_args(AppleBundleDestination("plugins"), bundle_output, sdk_name, extension), + _get_scan_folder_args(AppleBundleDestination("extensionkit_extensions"), bundle_output, sdk_name, extension), + _get_scan_folder_args(AppleBundleDestination("frameworks"), bundle_output, sdk_name, extension), + _get_scan_folder_args(AppleBundleDestination("appclips"), bundle_output, sdk_name, extension), + "--destination", + swift_support_dir, + ], + delimiter = " ", + quote = "shell", + ), + ], + allow_args = True, + ) + ctx.actions.run( + cmd_args(["/bin/sh", script], hidden = [stdlib_tool, bundle_output, swift_support_dir.as_output()]), + category = "copy_swift_stdlibs", + ) + + return swift_support_dir + +def _get_scan_folder_args(dest: AppleBundleDestination, bundle_output: Artifact, sdk_name, extension) -> ArgLike: + return cmd_args( + [ + "--scan-folder", + cmd_args( + [ + bundle_output, + bundle_relative_path_for_destination(dest, sdk_name, extension, False), + ], + delimiter = "/", + ), + ], + ) diff --git a/prelude/apple/user/apple_macos_bundle.bzl b/prelude/apple/user/apple_macos_bundle.bzl new file mode 100644 index 00000000000..17043975bfa --- /dev/null +++ b/prelude/apple/user/apple_macos_bundle.bzl @@ -0,0 +1,23 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load("@prelude//apple:apple_bundle.bzl", "apple_bundle_impl") +load("@prelude//apple:apple_bundle_attrs.bzl", "apple_macos_bundle_attrs") +load("@prelude//user:rule_spec.bzl", "RuleRegistrationSpec") +load(":macos_transition.bzl", "macos_transition") + +def apple_macos_bundle_impl(ctx: AnalysisContext) -> list[Provider]: + # This rule is _equivalent_ to `apple_bundle` except it applies + # an incoming macOS transition. 
+ return apple_bundle_impl(ctx) + +registration_spec = RuleRegistrationSpec( + name = "apple_macos_bundle", + impl = apple_macos_bundle_impl, + attrs = apple_macos_bundle_attrs(), + cfg = macos_transition, +) diff --git a/prelude/apple/user/apple_resource_bundle.bzl b/prelude/apple/user/apple_resource_bundle.bzl index 66c902dfb38..8562374d401 100644 --- a/prelude/apple/user/apple_resource_bundle.bzl +++ b/prelude/apple/user/apple_resource_bundle.bzl @@ -5,19 +5,20 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -load("@prelude//apple:apple_bundle_attrs.bzl", "get_apple_info_plist_build_system_identification_attrs") load("@prelude//apple:apple_bundle_resources.bzl", "get_apple_bundle_resource_part_list") load("@prelude//apple:apple_bundle_types.bzl", "AppleBundleResourceInfo") +load("@prelude//apple:apple_rules_impl_utility.bzl", "get_apple_info_plist_build_system_identification_attrs") load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo", "AppleToolsInfo") +load("@prelude//apple:resource_groups.bzl", "RESOURCE_GROUP_MAP_ATTR") +load("@prelude//apple/user:cpu_split_transition.bzl", "cpu_split_transition") load("@prelude//user:rule_spec.bzl", "RuleRegistrationSpec") load("@prelude//decls/ios_rules.bzl", "AppleBundleExtension") -load(":resource_group_map.bzl", "resource_group_map_attr") def _get_apple_resources_toolchain_attr(): # FIXME: prelude// should be standalone (not refer to fbcode//) return attrs.toolchain_dep(default = "fbcode//buck2/platform/toolchain:apple-resources", providers = [AppleToolchainInfo]) -def _impl(ctx: AnalysisContext) -> list[Provider]: +def _apple_resource_bundle_impl(ctx: AnalysisContext) -> list[Provider]: resource_output = get_apple_bundle_resource_part_list(ctx) return [ DefaultInfo(), @@ -29,7 +30,8 @@ def _impl(ctx: AnalysisContext) -> list[Provider]: def _apple_resource_bundle_attrs(): attribs = { "asset_catalogs_compilation_options": attrs.dict(key = attrs.string(), value = attrs.any(), default = {}), - "binary": attrs.option(attrs.dep(), default = None), + "binary": attrs.option(attrs.split_transition_dep(cfg = cpu_split_transition), default = None), + "copy_public_framework_headers": attrs.option(attrs.bool(), default = None), "deps": attrs.list(attrs.dep(), default = []), "extension": attrs.one_of(attrs.enum(AppleBundleExtension), attrs.string()), "ibtool_flags": attrs.option(attrs.list(attrs.string()), default = None), @@ -37,10 +39,12 @@ def _apple_resource_bundle_attrs(): "info_plist": attrs.source(), "info_plist_substitutions": attrs.dict(key = attrs.string(), value = attrs.string(), sorted = False, default = {}), "labels": attrs.list(attrs.string(), default = []), + "module_map": attrs.option(attrs.source(), default = None), "privacy_manifest": attrs.option(attrs.source(), default = None), "product_name": attrs.option(attrs.string(), default = None), "resource_group": attrs.option(attrs.string(), default = None), - "resource_group_map": resource_group_map_attr(), + "resource_group_map": RESOURCE_GROUP_MAP_ATTR, + "universal": attrs.option(attrs.bool(), default = None), # Only include macOS hosted toolchains, so we compile resources directly on Mac RE "_apple_toolchain": _get_apple_resources_toolchain_attr(), "_apple_tools": attrs.exec_dep(default = "prelude//apple/tools:apple-tools", providers = [AppleToolsInfo]), @@ -54,6 +58,6 @@ def _apple_resource_bundle_attrs(): registration_spec = RuleRegistrationSpec( name = "apple_resource_bundle", - impl = _impl, + impl = 
_apple_resource_bundle_impl, attrs = _apple_resource_bundle_attrs(), ) diff --git a/prelude/apple/user/apple_resource_transition.bzl b/prelude/apple/user/apple_resource_transition.bzl index 99464c6a538..a3817a5bb1e 100644 --- a/prelude/apple/user/apple_resource_transition.bzl +++ b/prelude/apple/user/apple_resource_transition.bzl @@ -9,16 +9,15 @@ def _is_universal(platform: PlatformInfo, refs: struct) -> bool: universal = platform.configuration.constraints.get(refs.universal[ConstraintSettingInfo].label) return universal.label == refs.universal_enabled[ConstraintValueInfo].label if universal != None else False -def _impl(platform: PlatformInfo, refs: struct, attrs: struct) -> PlatformInfo: +def _apple_resource_transition_impl(platform: PlatformInfo, refs: struct, attrs: struct) -> PlatformInfo: if attrs.skip_universal_resource_dedupe or not _is_universal(platform, refs): return platform else: cpu_constraint_label = refs.cpu[ConstraintSettingInfo].label - universal_constraint_label = refs.universal[ConstraintSettingInfo].label filtered_constraints = { constraint_setting_label: constraint_setting_value for (constraint_setting_label, constraint_setting_value) in platform.configuration.constraints.items() - if constraint_setting_label != cpu_constraint_label and constraint_setting_label != universal_constraint_label + if constraint_setting_label != cpu_constraint_label } return PlatformInfo( label = "apple_universal_deduped_resource", @@ -29,11 +28,11 @@ def _impl(platform: PlatformInfo, refs: struct, attrs: struct) -> PlatformInfo: ) apple_resource_transition = transition( - impl = _impl, + impl = _apple_resource_transition_impl, refs = { "cpu": "config//cpu/constraints:cpu", - "universal": "config//build_mode/apple/constraints:universal", - "universal_enabled": "config//build_mode/apple/constraints:universal-enabled", + "universal": "config//cpu/constraints:universal", + "universal_enabled": "config//cpu/constraints:universal-enabled", }, attrs = [ "skip_universal_resource_dedupe", diff --git a/prelude/apple/user/apple_selective_debugging.bzl b/prelude/apple/user/apple_selective_debugging.bzl index 6df13c4ee98..ba5bbef33ac 100644 --- a/prelude/apple/user/apple_selective_debugging.bzl +++ b/prelude/apple/user/apple_selective_debugging.bzl @@ -5,6 +5,10 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +load( + "@prelude//:artifact_tset.bzl", + "ArtifactInfoTag", +) load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolsInfo") load( "@prelude//linking:execution_preference.bzl", @@ -20,7 +24,6 @@ load( "parse_build_target_pattern", ) load("@prelude//utils:lazy.bzl", "lazy") -load("@prelude//utils:set.bzl", "set") _SelectionCriteria = record( include_build_target_patterns = field(list[BuildTargetPattern], []), @@ -40,6 +43,7 @@ AppleSelectiveDebuggingInfo = provider( AppleSelectiveDebuggingFilteredDebugInfo = record( map = field(dict[Label, list[Artifact]]), + swift_modules_labels = field(list[Label]), ) # The type of selective debugging json input to utilize. 
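To make the new plumbing concrete: the `--persisted-targets-file` handed to the scrubber earlier in this diff carries a single top-level `targets` list, matching the `write_json` call added later in this file. A minimal sketch of driving `scrub()` directly, with an assumed import path and placeholder target labels:

    # Sketch only: both JSON files use the {"targets": [...]} shape that the
    # write_json() call below emits; the label strings are placeholders.
    import json
    import tempfile

    from scrubber import scrub  # import path assumed for illustration

    def write_targets_json(labels):
        f = tempfile.NamedTemporaryFile("w", suffix=".json", delete=False)
        json.dump({"targets": labels}, f)
        f.close()
        return f.name

    focused = write_targets_json(["cell//example/app:FocusedLib"])
    persisted = write_targets_json(["cell//example/app:AlwaysKeepLib"])

    # Labels from the persisted file are unioned with the focused set, so
    # debug paths for AlwaysKeepLib survive even though it is not focused.
    scrub(
        input_file="MyApp",            # placeholder input binary
        output_file="MyApp.scrubbed",
        persisted_targets_file=persisted,
        targets_file=focused,
    )
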
@@ -55,7 +59,7 @@ _SelectiveDebuggingJsonType = enum(*_SelectiveDebuggingJsonTypes) _LOCAL_LINK_THRESHOLD = 0.2 -def _impl(ctx: AnalysisContext) -> list[Provider]: +def _apple_selective_debugging_impl(ctx: AnalysisContext) -> list[Provider]: json_type = _SelectiveDebuggingJsonType(ctx.attrs.json_type) # process inputs and provide them up the graph with typing @@ -108,7 +112,7 @@ def _impl(ctx: AnalysisContext) -> list[Provider]: # # See `_maybe_scrub_binary()` in apple_bundle.bzl if json_type != _SelectiveDebuggingJsonType("targets"): - return inner_ctx.actions.write(output_name, sorted(set(package_names).list())) + return inner_ctx.actions.write(output_name, sorted(set(package_names))) def scrub_selected_debug_paths_action(dynamic_ctx: AnalysisContext, artifacts, outputs): packages = [ @@ -118,20 +122,25 @@ def _impl(ctx: AnalysisContext) -> list[Provider]: ] dynamic_ctx.actions.write( outputs.values()[0], - sorted(set(filter(lambda p: p in packages, package_names)).list()), + sorted(set(filter(lambda p: p in packages, package_names))), ) output = inner_ctx.actions.declare_output(output_name) inner_ctx.actions.dynamic_output( dynamic = [targets_json_file], inputs = [], - outputs = [output], + outputs = [output.as_output()], f = scrub_selected_debug_paths_action, ) return output - def scrub_binary(inner_ctx, executable: Artifact, executable_link_execution_preference: LinkExecutionPreference, adhoc_codesign_tool: [RunInfo, None]) -> Artifact: + def scrub_binary( + inner_ctx, + executable: Artifact, + executable_link_execution_preference: LinkExecutionPreference, + adhoc_codesign_tool: [RunInfo, None], + focused_targets_labels: list[Label]) -> Artifact: inner_cmd = cmd_args(cmd) output = inner_ctx.actions.declare_output("debug_scrubbed/{}".format(executable.short_path)) @@ -143,6 +152,12 @@ def _impl(ctx: AnalysisContext) -> list[Provider]: inner_cmd.add(["--adhoc-codesign-tool", adhoc_codesign_tool]) inner_cmd.add(["--input", executable]) inner_cmd.add(["--output", output.as_output()]) + if len(focused_targets_labels) > 0: + additional_labels_json = inner_ctx.actions.write_json( + inner_ctx.attrs.name + ".additional_labels.json", + {"targets": [label.raw_target() for label in focused_targets_labels]}, + ) + inner_cmd.add(["--persisted-targets-file", additional_labels_json]) inner_ctx.actions.run( inner_cmd, category = "scrub_binary", @@ -156,12 +171,22 @@ def _impl(ctx: AnalysisContext) -> list[Provider]: def filter_debug_info(debug_info: TransitiveSetIterator) -> AppleSelectiveDebuggingFilteredDebugInfo: map = {} + selected_targets_contain_swift = False for infos in debug_info: for info in infos: - if _is_label_included(info.label, selection_criteria): - map[info.label] = info.artifacts - - return AppleSelectiveDebuggingFilteredDebugInfo(map = map) + is_swiftmodule = ArtifactInfoTag("swiftmodule") in info.tags + is_swift_pcm = ArtifactInfoTag("swift_pcm") in info.tags + is_swift_related = is_swiftmodule or is_swift_pcm + if _is_label_included(info.label, selection_criteria) or (selected_targets_contain_swift and is_swift_related): + # There might be a few ArtifactInfo corresponding to the same Label, + # so to avoid overwriting, we need to preserve all artifacts. 
+ if info.label in map: + map[info.label] += info.artifacts + else: + map[info.label] = list(info.artifacts) + + selected_targets_contain_swift = selected_targets_contain_swift or ArtifactInfoTag("swiftmodule") in info.tags + return AppleSelectiveDebuggingFilteredDebugInfo(map = map, swift_modules_labels = []) def preference_for_links(links: list[Label], deps_preferences: list[LinkExecutionPreferenceInfo]) -> LinkExecutionPreference: # If any dependent links were run locally, prefer that the current link is also performed locally, @@ -195,7 +220,7 @@ def _impl(ctx: AnalysisContext) -> list[Provider]: registration_spec = RuleRegistrationSpec( name = "apple_selective_debugging", - impl = _impl, + impl = _apple_selective_debugging_impl, attrs = { "exclude_build_target_patterns": attrs.list(attrs.string(), default = []), "exclude_regular_expressions": attrs.list(attrs.string(), default = []), diff --git a/prelude/apple/user/apple_simulators.bzl b/prelude/apple/user/apple_simulators.bzl index 08deb59e13a..9f4208478c2 100644 --- a/prelude/apple/user/apple_simulators.bzl +++ b/prelude/apple/user/apple_simulators.bzl @@ -13,30 +13,13 @@ def _rule_impl(ctx: AnalysisContext) -> list[Provider]: LocalResourceInfo( setup = cmd_args([ctx.attrs.broker[RunInfo], "--simulator-manager", ctx.attrs.idb_targets[RunInfo]] + ctx.attrs.args), resource_env_vars = { - "IDB_COMPANION": "socket_address", + "DEVICE_SET_PATH": "device_set_path", + "DEVICE_UDID": "udid", }, setup_timeout_seconds = ctx.attrs.setup_timeout_seconds, ), ] -# We don't want `apple_simulators` target to be configured differently and handled as a different resource broker by buck2 core. -# By nuking a platform we make sure there is only a single configured target for a resource broker which manages resources of certain type. 
-def _transition_impl(platform: PlatformInfo, refs: struct) -> PlatformInfo: - # buildifier: disable=unused-variable - _ = (platform, refs) - return PlatformInfo( - label = "apple_simulators", - configuration = ConfigurationInfo( - constraints = {}, - values = {}, - ), - ) - -apple_simulators_transition = transition( - impl = _transition_impl, - refs = {}, -) - registration_spec = RuleRegistrationSpec( name = "apple_simulators", impl = _rule_impl, diff --git a/prelude/apple/user/apple_toolchain_override.bzl b/prelude/apple/user/apple_toolchain_override.bzl index fce7dbfa7ef..f45ec583bff 100644 --- a/prelude/apple/user/apple_toolchain_override.bzl +++ b/prelude/apple/user/apple_toolchain_override.bzl @@ -9,7 +9,7 @@ load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo") load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo") load("@prelude//user:rule_spec.bzl", "RuleRegistrationSpec") -def _impl(ctx: AnalysisContext) -> list[Provider]: +def _apple_toolchain_override_impl(ctx: AnalysisContext) -> list[Provider]: base = ctx.attrs.base[AppleToolchainInfo] cxx_toolchain_override = ctx.attrs.cxx_toolchain[CxxToolchainInfo] return [ @@ -30,17 +30,16 @@ def _impl(ctx: AnalysisContext) -> list[Provider]: installer = base.installer, libtool = base.libtool, lipo = base.lipo, - min_version = base.min_version, + mapc = base.mapc, + merge_index_store = base.merge_index_store, momc = base.momc, objdump = base.objdump, - odrcov = base.odrcov, platform_path = base.platform_path, sdk_build_version = base.sdk_build_version, sdk_name = base.sdk_name, sdk_path = base.sdk_path, sdk_version = base.sdk_version, swift_toolchain_info = base.swift_toolchain_info, - watch_kit_stub_binary = base.watch_kit_stub_binary, xcode_build_version = base.xcode_build_version, xcode_version = base.xcode_version, xctest = base.xctest, @@ -49,7 +48,7 @@ def _impl(ctx: AnalysisContext) -> list[Provider]: registration_spec = RuleRegistrationSpec( name = "apple_toolchain_override", - impl = _impl, + impl = _apple_toolchain_override_impl, attrs = { "base": attrs.toolchain_dep(providers = [AppleToolchainInfo]), "cxx_toolchain": attrs.toolchain_dep(providers = [CxxToolchainInfo]), diff --git a/prelude/apple/user/apple_tools.bzl b/prelude/apple/user/apple_tools.bzl index c9cf13a9c25..1c6155ffc6e 100644 --- a/prelude/apple/user/apple_tools.bzl +++ b/prelude/apple/user/apple_tools.bzl @@ -8,7 +8,7 @@ load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolsInfo") load("@prelude//user:rule_spec.bzl", "RuleRegistrationSpec") -def _impl(ctx: AnalysisContext) -> list[Provider]: +def _apple_tools_impl(ctx: AnalysisContext) -> list[Provider]: return [ DefaultInfo(), AppleToolsInfo( @@ -21,7 +21,8 @@ def _impl(ctx: AnalysisContext) -> list[Provider]: make_modulemap = ctx.attrs.make_modulemap[RunInfo], make_vfsoverlay = ctx.attrs.make_vfsoverlay[RunInfo], selective_debugging_scrubber = ctx.attrs.selective_debugging_scrubber[RunInfo], - swift_objc_header_postprocess = ctx.attrs.swift_objc_header_postprocess[RunInfo], + xcframework_maker = ctx.attrs.xcframework_maker[RunInfo], + framework_sanitizer = ctx.attrs.framework_sanitizer[RunInfo], ), ] @@ -30,17 +31,18 @@ def _impl(ctx: AnalysisContext) -> list[Provider]: # toolchain/SDK specific, they're just internal helper tools. 
registration_spec = RuleRegistrationSpec( name = "apple_tools", - impl = _impl, + impl = _apple_tools_impl, attrs = { - "adhoc_codesign_tool": attrs.dep(providers = [RunInfo]), + "adhoc_codesign_tool": attrs.option(attrs.dep(providers = [RunInfo]), default = None), "assemble_bundle": attrs.dep(providers = [RunInfo]), "dry_codesign_tool": attrs.dep(providers = [RunInfo]), + "framework_sanitizer": attrs.dep(providers = [RunInfo]), "info_plist_processor": attrs.dep(providers = [RunInfo]), "ipa_package_maker": attrs.dep(providers = [RunInfo]), "make_modulemap": attrs.dep(providers = [RunInfo]), "make_vfsoverlay": attrs.dep(providers = [RunInfo]), "selective_debugging_scrubber": attrs.dep(providers = [RunInfo]), "split_arch_combine_dsym_bundles_tool": attrs.dep(providers = [RunInfo]), - "swift_objc_header_postprocess": attrs.dep(providers = [RunInfo]), + "xcframework_maker": attrs.dep(providers = [RunInfo]), }, ) diff --git a/prelude/apple/user/apple_watchos_bundle.bzl b/prelude/apple/user/apple_watchos_bundle.bzl index 261f68b633b..4a03df0d437 100644 --- a/prelude/apple/user/apple_watchos_bundle.bzl +++ b/prelude/apple/user/apple_watchos_bundle.bzl @@ -6,46 +6,10 @@ # of this source tree. load("@prelude//apple:apple_bundle.bzl", "apple_bundle_impl") -load("@prelude//apple:apple_rules_impl_utility.bzl", "apple_bundle_extra_attrs") +load("@prelude//apple:apple_bundle_attrs.bzl", "apple_watchos_bundle_attrs") load("@prelude//user:rule_spec.bzl", "RuleRegistrationSpec") -load("@prelude//decls/common.bzl", "Traversal") -load("@prelude//decls/ios_rules.bzl", "AppleBundleExtension") load(":watch_transition.bzl", "watch_transition") -def _apple_bundle_base_attrs(): - return { - # Attributes comes from `attributes.bzl` but since it's autogenerated, we cannot easily abstract - "asset_catalogs_compilation_options": attrs.dict(key = attrs.string(), value = attrs.any(), default = {}), - "binary": attrs.option(attrs.dep(), default = None), - "codesign_flags": attrs.list(attrs.string(), default = []), - "codesign_identity": attrs.option(attrs.string(), default = None), - "contacts": attrs.list(attrs.string(), default = []), - "default_host_platform": attrs.option(attrs.configuration_label(), default = None), - "default_platform": attrs.option(attrs.string(), default = None), - "deps": attrs.list(attrs.dep(), default = []), - "extension": attrs.one_of(attrs.enum(AppleBundleExtension), attrs.string()), - "ibtool_flags": attrs.option(attrs.list(attrs.string()), default = None), - "ibtool_module_flag": attrs.option(attrs.bool(), default = None), - "incremental_bundling_enabled": attrs.option(attrs.bool(), default = None), - "info_plist": attrs.source(), - "info_plist_substitutions": attrs.dict(key = attrs.string(), value = attrs.string(), sorted = False, default = {}), - "labels": attrs.list(attrs.string(), default = []), - "licenses": attrs.list(attrs.source(), default = []), - "platform_binary": attrs.option(attrs.list(attrs.tuple(attrs.regex(), attrs.dep())), default = None), - "product_name": attrs.option(attrs.string(), default = None), - "resource_group": attrs.option(attrs.string(), default = None), - "resource_group_map": attrs.option(attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal), attrs.option(attrs.string()))))), default = None), - "skip_copying_swift_stdlib": attrs.option(attrs.bool(), default = None), - "try_skip_code_signing": attrs.option(attrs.bool(), default = None), - "xcode_product_type": attrs.option(attrs.string(), default = None), - } - -def 
_apple_watchos_bundle_attrs(): - attributes = {} - attributes.update(_apple_bundle_base_attrs()) - attributes.update(apple_bundle_extra_attrs()) - return attributes - def apple_watchos_bundle_impl(ctx: AnalysisContext) -> list[Provider]: # This rule is _equivalent_ to `apple_bundle` except it applies # an incoming watchOS transition. @@ -54,6 +18,6 @@ def apple_watchos_bundle_impl(ctx: AnalysisContext) -> list[Provider]: registration_spec = RuleRegistrationSpec( name = "apple_watchos_bundle", impl = apple_watchos_bundle_impl, - attrs = _apple_watchos_bundle_attrs(), + attrs = apple_watchos_bundle_attrs(), cfg = watch_transition, ) diff --git a/prelude/apple/user/apple_xcframework.bzl b/prelude/apple/user/apple_xcframework.bzl new file mode 100644 index 00000000000..03aedbf6cf8 --- /dev/null +++ b/prelude/apple/user/apple_xcframework.bzl @@ -0,0 +1,185 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolsInfo") +load("@prelude//user:rule_spec.bzl", "RuleRegistrationSpec") + +def _apple_xcframework_impl(ctx: AnalysisContext) -> list[Provider]: + apple_tools = ctx.attrs._apple_tools[AppleToolsInfo] + + xcframework_dir = ctx.actions.declare_output(ctx.attrs.framework_name + ".xcframework", dir = True) + xcframework_command = cmd_args([ + apple_tools.xcframework_maker, + "--output-path", + xcframework_dir.as_output(), + "--name", + ctx.attrs.framework_name, + ]) + + for arch in ctx.attrs.framework: + framework_dep = ctx.attrs.framework[arch] + framework_paths = framework_dep[DefaultInfo].default_outputs + if len(framework_paths) > 1: + fail("xcframework's framework target {} must only produce one output".format(framework_dep.label)) + + xcframework_command.add("--framework-path") + xcframework_command.add(arch) + xcframework_command.add(framework_paths[0]) + + if ctx.attrs.include_dsym: + dsym_dep = framework_dep[DefaultInfo].sub_targets["dsym"] + dsym_path = dsym_dep[DefaultInfo].default_outputs + xcframework_command.add("--dsym-path") + xcframework_command.add(arch) + xcframework_command.add(dsym_path) + + ctx.actions.run(xcframework_command, category = "apple_xcframework") + return [ + DefaultInfo(default_output = xcframework_dir), + ] + +def _strip_os_sdk_and_runtime_constraints(platform: PlatformInfo, refs: struct) -> dict[TargetLabel, ConstraintValueInfo]: + return { + constraint_setting_label: constraint_setting_value + for (constraint_setting_label, constraint_setting_value) in platform.configuration.constraints.items() + if constraint_setting_label not in [refs.os[ConstraintSettingInfo].label, refs.sdk[ConstraintSettingInfo].label, refs.universal[ConstraintSettingInfo].label, refs.runtime[ConstraintSettingInfo].label] + } + +# provides a map of os-platform to cpu architectures +# so we can identify when universal binaries can be created instead of +# two separate frameworks +# +# e.g. 
input of ["ios-arm64", "iphonesimulator-x86_64", "iphonesimulator-arm64"] +# will produce {"ios": ["arm64"], "iphonesimulator": ["arm64", "x86_64"]} + +def _normalize_platforms(platforms: list[str]) -> dict[str, list[str]]: + result = {} + for platform in platforms: + plat_list = platform.split("-") + plat_type = plat_list[0] + plat_archs = plat_list[1:] + previous_archs = result.get(plat_type, []) + result[plat_type] = sorted(plat_archs + previous_archs) + + return result + +def _apple_xcframework_framework_attrib_split_transition_impl( + platform: PlatformInfo, + refs: struct, + attrs: struct) -> dict[str, PlatformInfo]: + result = {} + + new_platforms = _normalize_platforms(attrs.platforms).items() + for os_value, cpu_values in new_platforms: + updated_constraints = _strip_os_sdk_and_runtime_constraints(platform, refs) + + canonical_platform_suffix = "" + + if os_value == "macos": + canonical_platform_prefix = "macos" + updated_constraints[refs.os[ConstraintSettingInfo].label] = refs.macos[ConstraintValueInfo] + elif os_value == "iphoneos": + canonical_platform_prefix = "ios" + updated_constraints[refs.os[ConstraintSettingInfo].label] = refs.ios[ConstraintValueInfo] + updated_constraints[refs.sdk[ConstraintSettingInfo].label] = refs.ios_device_sdk[ConstraintValueInfo] + elif os_value == "watchos": + canonical_platform_prefix = "watchos" + updated_constraints[refs.os[ConstraintSettingInfo].label] = refs.watchos[ConstraintValueInfo] + updated_constraints[refs.sdk[ConstraintSettingInfo].label] = refs.watchos_device_sdk[ConstraintValueInfo] + elif os_value == "iphonesimulator": + canonical_platform_prefix = "ios" + canonical_platform_suffix = "simulator" + updated_constraints[refs.os[ConstraintSettingInfo].label] = refs.ios[ConstraintValueInfo] + updated_constraints[refs.sdk[ConstraintSettingInfo].label] = refs.ios_simulator_sdk[ConstraintValueInfo] + elif os_value == "watchossimulator": + canonical_platform_prefix = "watchos" + canonical_platform_suffix = "simulator" + updated_constraints[refs.os[ConstraintSettingInfo].label] = refs.watchos[ConstraintValueInfo] + updated_constraints[refs.sdk[ConstraintSettingInfo].label] = refs.watchos_simulator_sdk[ConstraintValueInfo] + elif os_value == "maccatalyst": + canonical_platform_prefix = "ios" + canonical_platform_suffix = "maccatalyst" + updated_constraints[refs.os[ConstraintSettingInfo].label] = refs.ios[ConstraintValueInfo] + updated_constraints[refs.sdk[ConstraintSettingInfo].label] = refs.maccatalyst_sdk[ConstraintValueInfo] + updated_constraints[refs.runtime[ConstraintSettingInfo].label] = refs.maccatalyst_runtime[ConstraintValueInfo] + else: + fail("Unsupported OS value {} in apple_xcframework() platforms.".format(os_value)) + + cpu_constraint_name = refs.cpu[ConstraintSettingInfo].label + + if len(cpu_values) > 1: + updated_constraints[refs.universal[ConstraintSettingInfo].label] = refs.universal_enabled[ConstraintValueInfo] + elif cpu_values[0] == "arm64": + updated_constraints[cpu_constraint_name] = refs.arm64[ConstraintValueInfo] + elif cpu_values[0] == "x86_64": + updated_constraints[cpu_constraint_name] = refs.x86_64[ConstraintValueInfo] + else: + fail("Unsupported CPU value {} in apple_xcframework().".format(cpu_values[0])) + + new_cfg = ConfigurationInfo( + constraints = updated_constraints, + values = platform.configuration.values, + ) + + canonical_platform_name = canonical_platform_prefix + "-" + "_".join(cpu_values) + if len(canonical_platform_suffix) > 0: + canonical_platform_name += "-" + canonical_platform_suffix + + 
result.update({canonical_platform_name: PlatformInfo( + label = canonical_platform_name + "_transition", + configuration = new_cfg, + )}) + + return result + +framework_split_transition = transition( + impl = _apple_xcframework_framework_attrib_split_transition_impl, + refs = { + "arm32": "config//cpu/constraints:arm32", + "arm64": "config//cpu/constraints:arm64", + "cpu": "config//cpu/constraints:cpu", + "ios": "config//os/constraints:iphoneos", + "ios_device_sdk": "config//os/sdk/apple/constraints:iphoneos", + "ios_simulator_sdk": "config//os/sdk/apple/constraints:iphonesimulator", + "maccatalyst_runtime": "config//runtime/constraints:maccatalyst", + "maccatalyst_sdk": "config//os/sdk/apple/constraints:maccatalyst", + "macos": "config//os/constraints:macos", + "os": "config//os/constraints:os", + "runtime": "config//runtime/constraints:runtime", + "sdk": "config//os/sdk/apple/constraints:_", + "swift_library_evolution": "config//features/apple/constraints:swift_library_evolution", + "swift_library_evolution_enabled": "config//features/apple/constraints:swift_library_evolution_enabled", + "universal": "config//cpu/constraints:universal", + "universal_enabled": "config//cpu/constraints:universal-enabled", + "watchos": "config//os/constraints:watchos", + "watchos_device_sdk": "config//os/sdk/apple/constraints:watchos", + "watchos_simulator_sdk": "config//os/sdk/apple/constraints:watchsimulator", + "x86_64": "config//cpu/constraints:x86_64", + }, + attrs = [ + "platforms", + ], + split = True, +) + +registration_spec = RuleRegistrationSpec( + name = "apple_xcframework", + impl = _apple_xcframework_impl, + attrs = { + "framework": attrs.split_transition_dep(cfg = framework_split_transition), + "framework_name": attrs.string(), + "include_dsym": attrs.option(attrs.bool(), default = None), + "platforms": attrs.list(attrs.string(), default = []), + "_apple_tools": attrs.exec_dep(default = "prelude//apple/tools:apple-tools", providers = [AppleToolsInfo]), + }, +) + +def apple_xcframework_extra_attrs(): + attribs = { + "_apple_tools": attrs.exec_dep(default = "prelude//apple/tools:apple-tools", providers = [AppleToolsInfo]), + } + return attribs diff --git a/prelude/apple/user/cpu_split_transition.bzl b/prelude/apple/user/cpu_split_transition.bzl index 4beda50023b..0ba17eeddc8 100644 --- a/prelude/apple/user/cpu_split_transition.bzl +++ b/prelude/apple/user/cpu_split_transition.bzl @@ -52,7 +52,7 @@ def _cpu_split_transition_impl( cpu_name_to_cpu_constraint = {} if os_label == refs.ios[ConstraintValueInfo].label: - if sdk == None or sdk_label == refs.ios_simulator_sdk[ConstraintValueInfo].label: + if sdk == None or sdk_label == refs.ios_simulator_sdk[ConstraintValueInfo].label or sdk_label == refs.maccatalyst_sdk[ConstraintValueInfo].label: # default to simulator if SDK is not specified cpu_name_to_cpu_constraint["arm64"] = refs.arm64[ConstraintValueInfo] cpu_name_to_cpu_constraint["x86_64"] = refs.x86_64[ConstraintValueInfo] @@ -63,10 +63,9 @@ def _cpu_split_transition_impl( elif os_label == refs.watchos[ConstraintValueInfo].label: if sdk == None or sdk_label == refs.watchos_simulator_sdk[ConstraintValueInfo].label: cpu_name_to_cpu_constraint["arm64"] = refs.arm64[ConstraintValueInfo] - cpu_name_to_cpu_constraint["x86_64"] = refs.x86_64[ConstraintValueInfo] elif sdk_label == refs.watchos_device_sdk[ConstraintValueInfo].label: cpu_name_to_cpu_constraint["arm64"] = refs.arm64[ConstraintValueInfo] - cpu_name_to_cpu_constraint["arm32"] = refs.arm32[ConstraintValueInfo] + 
cpu_name_to_cpu_constraint["arm64_32"] = refs.arm64_32[ConstraintValueInfo]
         else:
             fail("Unsupported SDK {} for WatchOS".format(sdk_label))
     elif os_label == refs.macos[ConstraintValueInfo].label:
@@ -103,15 +102,17 @@ cpu_split_transition = transition(
     refs = {
         "arm32": "config//cpu/constraints:arm32",
         "arm64": "config//cpu/constraints:arm64",
+        "arm64_32": "config//cpu/constraints:arm64_32",
         "cpu": "config//cpu/constraints:cpu",
         "ios": "config//os/constraints:iphoneos",
         "ios_device_sdk": "config//os/sdk/apple/constraints:iphoneos",
         "ios_simulator_sdk": "config//os/sdk/apple/constraints:iphonesimulator",
+        "maccatalyst_sdk": "config//os/sdk/apple/constraints:maccatalyst",
         "macos": "config//os/constraints:macos",
         "os": "config//os/constraints:os",
         "sdk": "config//os/sdk/apple/constraints:_",
-        "universal": "config//build_mode/apple/constraints:universal",
-        "universal_enabled": "config//build_mode/apple/constraints:universal-enabled",
+        "universal": "config//cpu/constraints:universal",
+        "universal_enabled": "config//cpu/constraints:universal-enabled",
         "watchos": "config//os/constraints:watchos",
         "watchos_device_sdk": "config//os/sdk/apple/constraints:watchos",
         "watchos_simulator_sdk": "config//os/sdk/apple/constraints:watchsimulator",
diff --git a/prelude/apple/user/macos_transition.bzl b/prelude/apple/user/macos_transition.bzl
new file mode 100644
index 00000000000..74d6bca1057
--- /dev/null
+++ b/prelude/apple/user/macos_transition.bzl
@@ -0,0 +1,52 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+#
+# This source code is licensed under both the MIT license found in the
+# LICENSE-MIT file in the root directory of this source tree and the Apache
+# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
+# of this source tree.
+
+load("@prelude//transitions:utils.bzl", "filtered_platform_constraints", "get_constraint_value")
+load("@prelude//utils:expect.bzl", "expect")
+
+"""
+Transition from the macCatalyst SDK to the macOS SDK. Used for AppKit extension bundle rules.
+Transforms both the OS and SDK constraints.
+Only a sanity check of the source configuration is performed.
+"""
+
+def _macos_transition_impl(platform: PlatformInfo, refs: struct) -> PlatformInfo:
+    # This function operates in the following way:
+    # - Start with all the constraints from the platform and filter out the constraints for OS and SDK.
+    # - Always set the new OS constraint to macOS.
+    # - If the old SDK constraint was macCatalyst, replace it with the equivalent macOS constraint.
+    # - Return a new platform with the updated constraints.
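(`filtered_platform_constraints` and `get_constraint_value` come from `@prelude//transitions:utils.bzl`, which this diff does not touch. Judging from the `_os_and_sdk_unrelated_constraints` and `_old_os_constraint_value` helpers they replace in watch_transition.bzl further down, the two utilities presumably look roughly like this sketch, assuming that equivalence:

def filtered_platform_constraints(
        platform: PlatformInfo,
        constraint_setting_labels_to_drop: list[TargetLabel]) -> dict[TargetLabel, ConstraintValueInfo]:
    # Keep every constraint whose setting is not being rewritten by the transition.
    return {
        constraint_setting_label: constraint_setting_value
        for (constraint_setting_label, constraint_setting_value) in platform.configuration.constraints.items()
        if constraint_setting_label not in constraint_setting_labels_to_drop
    }

def get_constraint_value(platform: PlatformInfo, constraint: ConstraintSettingInfo) -> ConstraintValueInfo | None:
    # The source configuration may not constrain this setting at all.
    return platform.configuration.constraints.get(constraint.label)
)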
+    updated_constraints = filtered_platform_constraints(platform, [refs.os[ConstraintSettingInfo].label, refs.sdk[ConstraintSettingInfo].label])
+
+    macos = refs.macos[ConstraintValueInfo]
+    updated_constraints[refs.os[ConstraintSettingInfo].label] = macos
+
+    # Update SDK constraint
+    old_sdk = get_constraint_value(platform, refs.sdk[ConstraintSettingInfo])
+    maccatalyst_sdk = refs.maccatalyst_sdk[ConstraintValueInfo]
+    macosx_sdk = refs.macos_sdk[ConstraintValueInfo]
+
+    if old_sdk != None:
+        expect(old_sdk.label in [macosx_sdk.label, maccatalyst_sdk.label], "If present, an SDK transitioned non-identically to macOS should be `macCatalyst`, got {}".format(old_sdk.label))
+    updated_constraints[refs.sdk[ConstraintSettingInfo].label] = macosx_sdk
+
+    new_cfg = ConfigurationInfo(
+        constraints = updated_constraints,
+        values = platform.configuration.values,
+    )
+    return PlatformInfo(
+        label = "macos_transition",
+        configuration = new_cfg,
+    )
+
+macos_transition = transition(impl = _macos_transition_impl, refs = {
+    "maccatalyst_sdk": "config//os/sdk/apple/constraints:maccatalyst",
+    "macos": "config//os/constraints:macos",
+    "macos_sdk": "config//os/sdk/apple/constraints:macosx",
+    "os": "config//os/constraints:os",
+    "sdk": "config//os/sdk/apple/constraints:_",
+})
diff --git a/prelude/apple/user/resource_group_map.bzl b/prelude/apple/user/resource_group_map.bzl
index 850f17fa358..3ad9549aff5 100644
--- a/prelude/apple/user/resource_group_map.bzl
+++ b/prelude/apple/user/resource_group_map.bzl
@@ -7,6 +7,7 @@
 load(
     "@prelude//apple:resource_groups.bzl",
+    "ResourceGraphNode",  # @unused Used as a type
     "ResourceGroupInfo",
     "create_resource_graph",
     "get_resource_graph_node_map_func",
@@ -15,29 +16,31 @@ load(
     "@prelude//cxx:groups.bzl",
     "compute_mappings",
     "create_group",
+    "get_roots_from_mapping",
     "make_info_subtarget_providers",
     "parse_groups_definitions",
 )
+load(
+    "@prelude//cxx:groups_types.bzl",
+    "GroupMapping",  # @unused Used as a type
+    "Traversal",
+)
 load("@prelude//user:rule_spec.bzl", "RuleRegistrationSpec")
-load("@prelude//decls/common.bzl", "Traversal")
-
-def resource_group_map_attr():
-    return attrs.option(attrs.dep(providers = [ResourceGroupInfo]), default = None)
+load("@prelude//utils:utils.bzl", "flatten")

-def _impl(ctx: AnalysisContext) -> list[Provider]:
+def _resource_group_map_impl(ctx: AnalysisContext) -> list[Provider]:
     resource_groups = parse_groups_definitions(ctx.attrs.map, lambda root: root.label)

-    # Extract deps from the roots via the raw attrs, as `parse_groups_definitions`
-    # parses them as labels.
-    resource_groups_deps = [
-        mapping[0]
-        for entry in ctx.attrs.map
-        for mapping in entry[1]
-    ]
+    resource_group_to_implicit_deps_mapping = {
+        group: flatten([get_roots_from_mapping(mapping) for mapping in mappings])
+        for group, mappings in ctx.attrs.map
+    }
+    flattened_resource_group_deps = flatten(resource_group_to_implicit_deps_mapping.values())
+
     resource_graph = create_resource_graph(
         ctx = ctx,
         labels = [],
-        deps = resource_groups_deps,
+        deps = flattened_resource_group_deps,
         exported_deps = [],
     )
     resource_graph_node_map = get_resource_graph_node_map_func(resource_graph)()
@@ -49,11 +52,10 @@ def _impl(ctx: AnalysisContext) -> list[Provider]:
     # ResourceGraphInfo, which `create_resource_graph` removes above.
     # So make sure we remove them from the mappings too, otherwise
     # `compute_mappings` crashes on the inconsistency.
- mappings = [ - mapping - for mapping in group.mappings - if mapping.root == None or mapping.root in resource_graph_node_map - ], + mappings = filter( + None, + [_fixup_mapping_to_only_include_roots_in_the_map(m, resource_graph_node_map) for m in group.mappings], + ), ) for group in resource_groups }, @@ -71,14 +73,44 @@ def _impl(ctx: AnalysisContext) -> list[Provider]: # referenced in our roots, so propagate them here. # NOTE(agallagher): We do this to maintain existing behavior here # but it's not clear if it's actually desirable behavior. - implicit_deps = resource_groups_deps, + resource_group_to_implicit_deps_mapping = resource_group_to_implicit_deps_mapping, ), ] +def _fixup_mapping_to_only_include_roots_in_the_map(mapping: GroupMapping, node_map: dict[Label, ResourceGraphNode]) -> GroupMapping | None: + if not mapping.roots: + return mapping + + filtered_roots = [ + root + for root in mapping.roots + if root in node_map + ] + if not filtered_roots: + return None + + return GroupMapping( + roots = filtered_roots, + traversal = mapping.traversal, + filters = mapping.filters, + preferred_linkage = mapping.preferred_linkage, + ) + registration_spec = RuleRegistrationSpec( name = "resource_group_map", - impl = _impl, + impl = _resource_group_map_impl, attrs = { - "map": attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal), attrs.option(attrs.string()))))), + "map": attrs.list( + attrs.tuple( + attrs.string(), + attrs.list( + attrs.tuple( + attrs.one_of(attrs.dep(), attrs.list(attrs.dep())), + attrs.enum(Traversal.values()), + attrs.option(attrs.string()), + ), + ), + ), + ), }, ) diff --git a/prelude/apple/user/target_sdk_version_transition.bzl b/prelude/apple/user/target_sdk_version_transition.bzl new file mode 100644 index 00000000000..d602fd3dab6 --- /dev/null +++ b/prelude/apple/user/target_sdk_version_transition.bzl @@ -0,0 +1,50 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +""" +Apply a constraint if the propagated_target_sdk_version attribute is set. +This overrides any existing target_sdk_version select. 
+""" + +load("@prelude//apple:versions.bzl", "TARGET_SDK_VERSIONS") + +def _target_sdk_version_transition_impl(platform: PlatformInfo, refs: struct, attrs: struct) -> PlatformInfo: + target_sdk_version = attrs.propagated_target_sdk_version + if not target_sdk_version: + return platform + + constraint_label = refs.version[ConstraintSettingInfo].label + constraint_value = platform.configuration.constraints.get(constraint_label) + version_provider = getattr(refs, target_sdk_version, None) + if version_provider == None: + fail("target sdk version {} is missing constraints".format(target_sdk_version)) + + version_constraint = version_provider[ConstraintValueInfo] + if constraint_value == version_constraint: + return platform + + updated_constraints = platform.configuration.constraints + updated_constraints[constraint_label] = version_constraint + new_cfg = ConfigurationInfo( + constraints = updated_constraints, + values = platform.configuration.values, + ) + return PlatformInfo( + label = platform.label + "_target_sdk_version_" + target_sdk_version, + configuration = new_cfg, + ) + +target_sdk_version_transition = transition( + impl = _target_sdk_version_transition_impl, + refs = dict( + [("version", "@config//version:constraint-setting-target-sdk-version")] + { + version: "@config//version:constraint-value-target-sdk-version-" + version + for version in TARGET_SDK_VERSIONS + }.items(), + ), + attrs = ["propagated_target_sdk_version"], +) diff --git a/prelude/apple/user/watch_transition.bzl b/prelude/apple/user/watch_transition.bzl index 2ec70ebb61d..22e1dd241e4 100644 --- a/prelude/apple/user/watch_transition.bzl +++ b/prelude/apple/user/watch_transition.bzl @@ -11,31 +11,19 @@ Transforms both OS and SDK constraints. Only sanity check for source configuration is done. """ +load("@prelude//transitions:utils.bzl", "filtered_platform_constraints", "get_constraint_value") load("@prelude//utils:expect.bzl", "expect") -def _os_and_sdk_unrelated_constraints(platform: PlatformInfo, refs: struct) -> dict[TargetLabel, ConstraintValueInfo]: - return { - constraint_setting_label: constraint_setting_value - for (constraint_setting_label, constraint_setting_value) in platform.configuration.constraints.items() - if constraint_setting_label not in [refs.os[ConstraintSettingInfo].label, refs.sdk[ConstraintSettingInfo].label] - } - -def _old_os_constraint_value(platform: PlatformInfo, refs: struct) -> [None, ConstraintValueInfo]: - return platform.configuration.constraints.get(refs.os[ConstraintSettingInfo].label) - -def _old_sdk_constraint_value(platform: PlatformInfo, refs: struct) -> [None, ConstraintValueInfo]: - return platform.configuration.constraints.get(refs.sdk[ConstraintSettingInfo].label) - -def _impl(platform: PlatformInfo, refs: struct) -> PlatformInfo: +def _watch_transition_impl(platform: PlatformInfo, refs: struct) -> PlatformInfo: # This functions operates in the following way: # - Start with all the constraints from the platform and filter out the constraints for OS and SDK. # - If the old OS constraint was iOS or watchOS, set the new constraint to be always watchOS. # - If the old SDK constraint was iOS, replace with the equivalent watchOS constraint. # - Return a new platform with the updated constraints. 
- updated_constraints = _os_and_sdk_unrelated_constraints(platform, refs) + updated_constraints = filtered_platform_constraints(platform, [refs.os[ConstraintSettingInfo].label, refs.sdk[ConstraintSettingInfo].label]) # Update OS constraint - old_os = _old_os_constraint_value(platform, refs) + old_os = get_constraint_value(platform, refs.os[ConstraintSettingInfo]) watchos = refs.watchos[ConstraintValueInfo] ios = refs.ios[ConstraintValueInfo] if old_os != None: @@ -43,7 +31,7 @@ def _impl(platform: PlatformInfo, refs: struct) -> PlatformInfo: updated_constraints[refs.os[ConstraintSettingInfo].label] = watchos # Update SDK constraint - old_sdk = _old_sdk_constraint_value(platform, refs) + old_sdk = get_constraint_value(platform, refs.sdk[ConstraintSettingInfo]) watchos_device_sdk = refs.watchos_device_sdk[ConstraintValueInfo] watchos_simulator_sdk = refs.watchos_simulator_sdk[ConstraintValueInfo] ios_device_sdk = refs.ios_device_sdk[ConstraintValueInfo] @@ -71,7 +59,7 @@ def _impl(platform: PlatformInfo, refs: struct) -> PlatformInfo: configuration = new_cfg, ) -watch_transition = transition(impl = _impl, refs = { +watch_transition = transition(impl = _watch_transition_impl, refs = { "ios": "config//os/constraints:iphoneos", "ios_device_sdk": "config//os/sdk/apple/constraints:iphoneos", "ios_simulator_sdk": "config//os/sdk/apple/constraints:iphonesimulator", diff --git a/prelude/apple/validation/debug_artifacts.bzl b/prelude/apple/validation/debug_artifacts.bzl new file mode 100644 index 00000000000..d1b6db0a36c --- /dev/null +++ b/prelude/apple/validation/debug_artifacts.bzl @@ -0,0 +1,98 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
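The helper added below unpacks each entry of `ctx.attrs.debug_artifacts_validators` as an `(analysis, reducer)` pair and indexes `RunInfo` on both halves. The attribute declaration itself is not part of this diff; a declaration compatible with that usage might look like the following sketch:

"debug_artifacts_validators": attrs.dict(
    key = attrs.string(),
    value = attrs.tuple(
        attrs.exec_dep(providers = [RunInfo]),  # analysis tool
        attrs.exec_dep(providers = [RunInfo]),  # reducer tool
    ),
    default = {},
),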
+ +load( + "@prelude//:artifact_tset.bzl", + "ArtifactTSet", # @unused Used as a type +) + +_AnalysisInput = record( + argsfile = field(Artifact), + identifier = field(int), +) + +def get_debug_artifacts_validators(ctx, artifacts: ArtifactTSet) -> dict[str, Artifact]: + label_to_input_artifacts = _get_analysis_input_artifacts(ctx, artifacts) + if not label_to_input_artifacts: + return {} + + name_to_validation_result = {} + for key, validator in ctx.attrs.debug_artifacts_validators.items(): + analysis, reducer = validator + label_to_analysis_artifacts = _analyze_artifacts(ctx, key, analysis[RunInfo], label_to_input_artifacts) + name_to_validation_result[key] = _reduce_analysis_artifacts(ctx, key, reducer[RunInfo], label_to_analysis_artifacts) + + return name_to_validation_result + +def _get_analysis_input_artifacts(ctx, artifacts: ArtifactTSet) -> dict[Label, list[_AnalysisInput]]: + underlying_tset = artifacts._tset + if underlying_tset == None: + return {} + + results = {} + identifier = 0 + for infos in underlying_tset.traverse(): + for info in infos: + argsfile = ctx.actions.write( + "artifacts-{}.txt".format(identifier), + info.artifacts, + with_inputs = True, + ) + results.setdefault(info.label, []).append( + _AnalysisInput(argsfile = argsfile, identifier = identifier), + ) + identifier += 1 + return results + +def _analyze_artifacts( + ctx, + key: str, + analysis_tool: RunInfo, + label_to_artifacts: dict[Label, list[_AnalysisInput]]) -> dict[Label, list[Artifact]]: + label_to_analysis = {} + for label, inputs in label_to_artifacts.items(): + for input in inputs: + output = ctx.actions.declare_output("{}_{}.json".format(key, input.identifier)) + ctx.actions.run( + cmd_args([ + analysis_tool, + "--artifacts", + cmd_args(input.argsfile, format = "@{}"), + "--output", + output.as_output(), + ]), + category = "{}_analysis".format(key), + identifier = "{}_{}".format(ctx.attrs.name, input.identifier), + ) + label_to_analysis.setdefault(label, []).append(output) + + return label_to_analysis + +def _reduce_analysis_artifacts( + ctx, + key: str, + reducer_tool: RunInfo, + label_to_artifacts: dict[Label, list[Artifact]]) -> Artifact: + input_json = ctx.actions.write_json( + "{}_reducer_args.json".format(key), + label_to_artifacts, + with_inputs = True, + ) + + output = ctx.actions.declare_output("{}.json".format(key)) + ctx.actions.run( + cmd_args([ + reducer_tool, + "--analysis-json-path", + input_json, + "--output", + output.as_output(), + ]), + category = "{}_reduce".format(key), + identifier = ctx.attrs.name, + ) + return output diff --git a/prelude/apple/versions.bzl b/prelude/apple/versions.bzl new file mode 100644 index 00000000000..881d0d056b9 --- /dev/null +++ b/prelude/apple/versions.bzl @@ -0,0 +1,39 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +# All the target SDK versions currently in use. 
+TARGET_SDK_VERSIONS = [ + "1.0", + "4.0", + "7.0", + "9.0", + "10.0", + "10.10", + "10.12", + "10.13", + "10.14", + "10.15", + "11.0", + "11.3", + "12.0", + "12.4", + "13.0", + "13.1", + "13.3", + "13.4", + "14.0", + "14.2", + "15.0", + "15.1", + "15.2", + "15.5", + "16.0", + "16.1", + "16.3", + "17.0", + "18.0", +] diff --git a/prelude/apple/xcode.bzl b/prelude/apple/xcode.bzl index 84ae3df5f7b..b067ceb5425 100644 --- a/prelude/apple/xcode.bzl +++ b/prelude/apple/xcode.bzl @@ -13,28 +13,32 @@ load( "CompileArgsfile", # @unused Used as a type ) load( - "@prelude//cxx:compile.bzl", + "@prelude//cxx:cxx_sources.bzl", "CxxSrcWithFlags", # @unused Used as a type ) load("@prelude//cxx:xcode.bzl", "cxx_populate_xcode_attributes") +load("@prelude//ide_integrations:xcode.bzl", "XcodeDataInfoKeys") load("@prelude//utils:expect.bzl", "expect") def apple_populate_xcode_attributes( ctx, srcs: list[CxxSrcWithFlags], argsfiles: dict[str, CompileArgsfile], - product_name: str) -> dict[str, typing.Any]: + product_name: str, + contains_swift_sources: bool = False) -> dict[str, typing.Any]: data = cxx_populate_xcode_attributes(ctx = ctx, srcs = srcs, argsfiles = argsfiles, product_name = product_name) + data[XcodeDataInfoKeys.CONTAINS_SWIFT_SOURCES] = contains_swift_sources + if has_apple_toolchain(ctx): - data["arch"] = get_apple_architecture(ctx) - data["sdk"] = get_apple_sdk_name(ctx) - data["deployment_version"] = get_min_deployment_version_for_node(ctx) + data[XcodeDataInfoKeys.ARCH] = get_apple_architecture(ctx) + data[XcodeDataInfoKeys.SDK] = get_apple_sdk_name(ctx) + data[XcodeDataInfoKeys.DEPLOYMENT_VERSION] = get_min_deployment_version_for_node(ctx) if hasattr(ctx.attrs, "swift_version"): swift_version = ctx.attrs.swift_version if swift_version != None: - data["swift_version"] = swift_version + data[XcodeDataInfoKeys.SWIFT_VERSION] = swift_version apple_xcode_data_add_xctoolchain(ctx, data) return data @@ -65,7 +69,3 @@ def _get_attribute_with_output(ctx: AnalysisContext, attr_name: str) -> [Depende # So, an empty `DefaultInfo` basically signifies that there's no xctoolchain. return dep return None - -def get_project_root_file(ctx) -> Artifact: - content = cmd_args(ctx.label.project_root) - return ctx.actions.write("project_root", content, absolute = True) diff --git a/prelude/artifact_tset.bzl b/prelude/artifact_tset.bzl index 3e2152a5b74..0df80f907c4 100644 --- a/prelude/artifact_tset.bzl +++ b/prelude/artifact_tset.bzl @@ -11,9 +11,18 @@ load( "flatten", ) +# Generic tag to provide more information about the artifact +ArtifactInfoTag = enum( + # Describes swiftmodule artifact generated by swift rules. + "swiftmodule", + # Describes clang pcm module artifact generated and consumed by swift rules. + "swift_pcm", +) + ArtifactInfo = record( label = field(Label), artifacts = field(list[Artifact]), + tags = field(list[ArtifactInfoTag]), ) def _get_artifacts(entries: list[ArtifactInfo]) -> list[Artifact]: @@ -35,7 +44,8 @@ def make_artifact_tset( label: [Label, None] = None, artifacts: list[Artifact] = [], infos: list[ArtifactInfo] = [], - children: list[ArtifactTSet] = []) -> ArtifactTSet: + children: list[ArtifactTSet] = [], + tags: list[ArtifactInfoTag] = []) -> ArtifactTSet: expect( label != None or not artifacts, "must pass in `label` to associate with artifacts", @@ -47,7 +57,7 @@ def make_artifact_tset( # Build list of all non-child values. 
values = []
     if artifacts:
-        values.append(ArtifactInfo(label = label, artifacts = artifacts))
+        values.append(ArtifactInfo(label = label, artifacts = artifacts, tags = tags))
     values.extend(infos)

     # If there's no children or artifacts, return `None`.
diff --git a/prelude/artifacts.bzl b/prelude/artifacts.bzl
index 18d14f18394..e9d65f253c3 100644
--- a/prelude/artifacts.bzl
+++ b/prelude/artifacts.bzl
@@ -35,6 +35,27 @@ ArtifactOutputs = record(
     other_outputs = field(list[ArgLike]),
 )

+# Wrapper to support wrapping `Artifact`s referencing paths behind external
+# symlinks.
+ArtifactExt = record(
+    artifact = field(Artifact),
+    # If the `artifact` above is a symlink referencing an external path, this
+    # is an optional sub-path to append when accessing the path.
+    sub_path = field(str | None, None),
+    # Returns the resolved path as a `cmd_args()`, with the optional sub-path
+    # appended.
+    as_arg = field(typing.Callable),
+    join = field(typing.Callable),
+)
+
+# A provider that mirrors `DefaultInfo` for `Artifact` outputs, but allows
+# specifying an `ArtifactExt` as its default output.
+DefaultOutputExt = provider(
+    fields = dict(
+        default_output = provider_field(ArtifactExt),
+    ),
+)
+
 def single_artifact(dep: Artifact | Dependency) -> ArtifactOutputs:
     if type(dep) == "artifact":
         return ArtifactOutputs(
@@ -123,3 +144,33 @@ def unpack_artifact_map(artifacts: dict[str, Artifact | Dependency]) -> dict[str
     out[name] = single_artifact(artifact)

     return out
+
+def _as_arg(artifact: Artifact, sub_path: str | None) -> ArgLike:
+    if sub_path == None:
+        return artifact
+    return cmd_args(artifact, format = "{{}}/{}".format(sub_path))
+
+def artifact_ext(
+        artifact: Artifact,
+        sub_path: str | None = None) -> ArtifactExt:
+    return ArtifactExt(
+        artifact = artifact,
+        sub_path = sub_path,
+        as_arg = lambda: _as_arg(artifact, sub_path),
+        join = lambda p: artifact_ext(
+            artifact = artifact,
+            sub_path = p if sub_path == None else paths.join(sub_path, p),
+        ),
+    )
+
+def to_artifact_ext(src: Artifact | Dependency) -> ArtifactExt:
+    if type(src) == "dependency":
+        ext = src.get(DefaultOutputExt)
+        if ext != None:
+            return ext.default_output
+        else:
+            (src,) = src[DefaultInfo].default_outputs
+    return artifact_ext(src)
+
+def to_arglike(src: Artifact | Dependency) -> ArgLike:
+    return to_artifact_ext(src).as_arg()
diff --git a/prelude/attrs_validators.bzl b/prelude/attrs_validators.bzl
new file mode 100644
index 00000000000..74f41acef2d
--- /dev/null
+++ b/prelude/attrs_validators.bzl
@@ -0,0 +1,48 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+#
+# This source code is licensed under both the MIT license found in the
+# LICENSE-MIT file in the root directory of this source tree and the Apache
+# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
+# of this source tree.
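A short usage sketch of the `ArtifactExt` helpers added to artifacts.bzl above; the `toolchain` attribute and the `bin/clang` sub-path are hypothetical:

def _toolchain_runner_impl(ctx: AnalysisContext) -> list[Provider]:
    # Works for a plain artifact as well as a dep that carries
    # `DefaultOutputExt` for a path behind an external symlink.
    ext = to_artifact_ext(ctx.attrs.toolchain)

    # Append a sub-path under the (possibly symlinked) root and render
    # the result as a command-line argument.
    compiler = ext.join("bin/clang").as_arg()
    return [RunInfo(args = cmd_args(compiler))]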
+
+AttrsValidatorsInfo = provider(
+    fields = {
+        "func": typing.Callable[[AnalysisActions, Label, struct], dict[str, Artifact]],
+    },
+)
+
+ATTRS_VALIDATORS_NAME = "attrs_validators"
+ATTRS_VALIDATORS_TYPE = attrs.option(attrs.list(attrs.dep(providers = [AttrsValidatorsInfo])), default = None)
+
+def get_attrs_validators_outputs(ctx: AnalysisContext) -> (list[Provider], dict[str, list[Provider]]):
+    validators = getattr(ctx.attrs, ATTRS_VALIDATORS_NAME, [])
+    if not validators:
+        return ([], {})
+
+    specs = []
+    sub_targets = {}
+    for validator in validators:
+        for name, output in validator[AttrsValidatorsInfo].func(ctx.actions, ctx.label, ctx.attrs).items():
+            specs.append(ValidationSpec(name = name, validation_result = output))
+
+            if name in sub_targets:
+                fail("Collision: two attrs_validators with the same name '{}': {} and {}".format(
+                    name,
+                    output,
+                    sub_targets[name],
+                ))
+
+            sub_targets[name] = [DefaultInfo(output)]
+
+    return (
+        [ValidationInfo(validations = specs)] if specs else [],
+        {
+            "attrs-validators": [
+                DefaultInfo(
+                    # It'll be expensive to put all the artifacts in here, just skip it.
+                    default_outputs = None,
+                    sub_targets = sub_targets,
+                ),
+            ],
+        } if sub_targets else {},
+    )
diff --git a/prelude/cfg/modifier/alias.bzl b/prelude/cfg/modifier/alias.bzl
new file mode 100644
index 00000000000..1838bc93ccb
--- /dev/null
+++ b/prelude/cfg/modifier/alias.bzl
@@ -0,0 +1,28 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+#
+# This source code is licensed under both the MIT license found in the
+# LICENSE-MIT file in the root directory of this source tree and the Apache
+# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
+# of this source tree.
+
+"""
+Modifier aliases that can be used on the CLI, ex. after `--modifier=` or in `?`.
+
+These aliases are ones we share between internal and OSS usages.
+All constraints used in these aliases must also be available in OSS.
+"""
+
+# It's represented as a struct where the attribute name is the alias and the string
+# for the attribute is the fully qualified target. Defining aliases in a struct
+# helps enforce that the alias names do not contain any bad characters we cannot use on the CLI.
+#
+# We define aliases for modifiers here rather than reusing the `alias` section of buckconfig for
+# several reasons.
+# 1. The `alias` buckconfig is not well-designed. It only supports aliases within a cell, not
+#    global aliases, and users can override aliases in modefiles.
+# 2. Modifier aliases can point to conditional modifiers, which the `alias` buckconfig does not
+#    support.
+# 3. It's unlikely a user ever has to define an alias twice in both the `alias` buckconfig
+#    and in modifier aliases, because a modifier alias is a constraint value/config setting
+#    and those don't typically get built on the CLI.
+OSS_ALIASES = struct()
diff --git a/prelude/cfg/modifier/asserts.bzl b/prelude/cfg/modifier/asserts.bzl
new file mode 100644
index 00000000000..214c7b30cef
--- /dev/null
+++ b/prelude/cfg/modifier/asserts.bzl
@@ -0,0 +1,31 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+#
+# This source code is licensed under both the MIT license found in the
+# LICENSE-MIT file in the root directory of this source tree and the Apache
+# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
+# of this source tree.
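A rule opts into the validators above by including `ATTRS_VALIDATORS_TYPE` in its attribute map and merging the results into its providers; a minimal sketch of the consuming side, with a hypothetical rule implementation:

def _my_rule_impl(ctx: AnalysisContext) -> list[Provider]:
    out = ctx.actions.write("out.txt", "")
    validation_providers, validator_sub_targets = get_attrs_validators_outputs(ctx)
    return [
        DefaultInfo(default_output = out, sub_targets = validator_sub_targets),
    ] + validation_providers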
+
+load("@prelude//cfg/modifier:types.bzl", "Modifier", "is_modifiers_match")
+
+def verify_normalized_target(target: str):
+    # Do some basic checks that the target looks reasonably valid and normalized.
+    # Targets should always be fully qualified to improve readability.
+    if "//" not in target or target.startswith("//") or ":" not in target:
+        fail(
+            "Must specify fully qualified target (ex. `cell//foo:bar`). Found `{}`".format(
+                target,
+            ),
+        )
+
+def verify_normalized_modifier(modifier: Modifier):
+    if modifier == None:
+        pass
+    elif is_modifiers_match(modifier):
+        # TODO(scottcao): Add a test case for this once `bxl_test` supports testing failures
+        for key, sub_modifier in modifier.items():
+            if key != "_type":
+                verify_normalized_modifier(sub_modifier)
+    elif isinstance(modifier, str):
+        verify_normalized_target(modifier)
+    else:
+        fail("Found unexpected modifier `{}` type `{}`".format(modifier, type(modifier)))
diff --git a/prelude/cfg/modifier/cfg_constructor.bzl b/prelude/cfg/modifier/cfg_constructor.bzl
new file mode 100644
index 00000000000..7c24a0176b8
--- /dev/null
+++ b/prelude/cfg/modifier/cfg_constructor.bzl
@@ -0,0 +1,185 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+#
+# This source code is licensed under both the MIT license found in the
+# LICENSE-MIT file in the root directory of this source tree and the Apache
+# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
+# of this source tree.
+
+load("@prelude//utils:graph_utils.bzl", "post_order_traversal")
+load(
+    ":common.bzl",
+    "get_constraint_setting_deps",
+    "get_modifier_info",
+    "json_to_tagged_modifiers",
+    "modifier_to_refs",
+    "resolve_alias",
+    "resolve_modifier",
+)
+load(":name.bzl", "cfg_name")
+load(
+    ":types.bzl",
+    "Modifier",  # @unused
+    "ModifierCliLocation",
+    "ModifierTargetLocation",
+    "TaggedModifiers",
+)
+
+PostConstraintAnalysisParams = record(
+    legacy_platform = PlatformInfo | None,
+    # Merged modifiers from PACKAGE, target, and CLI modifiers.
+    merged_modifiers = list[TaggedModifiers],
+    extra_data = struct,
+)
+
+def cfg_constructor_pre_constraint_analysis(
+        *,
+        legacy_platform: PlatformInfo | None,
+        # dict[str, typing.Any] is the JSON dictionary form of `TaggedModifiers` passed from buck2 core
+        package_modifiers: list[dict[str, typing.Any]] | None,
+        # typing.Any is the JSON form of a modifier
+        target_modifiers: list[Modifier] | None,
+        cli_modifiers: list[str],
+        rule_name: str,
+        aliases: struct,
+        extra_data: struct,
+        **_kwargs) -> (list[str], PostConstraintAnalysisParams):
+    """
+    First stage of cfg constructor for modifiers.
+
+    Args:
+        legacy_platform:
+            PlatformInfo from legacy target platform resolution, if one is specified
+        package_modifiers:
+            A list of modifiers specified from all parent PACKAGE files
+        target_modifiers:
+            A list of modifiers specified from the buildfile via the `metadata` attribute.
+        cli_modifiers:
+            Modifiers specified from the `--modifier` flag, `?modifier`, or BXL
+        rule_name:
+            Name of the rule of the target being configured; used to filter per-rule PACKAGE modifiers
+        aliases:
+            A struct mapping modifier aliases to modifiers.
+        extra_data:
+            Extra data used for additional logging/validation by our internal modifier implementation.
+
+    Returns `(refs, PostConstraintAnalysisParams)`, where `refs` is a list of fully qualified configuration
+    targets we need providers for.
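+
+    Example: with no PACKAGE or target modifiers and
+    `cli_modifiers = ["ovr_config//os/constraints:linux"]`, the returned `refs` is
+    `["ovr_config//os/constraints:linux"]` and `merged_modifiers` holds a single
+    `TaggedModifiers` tagged with `ModifierCliLocation`.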
+ """ + package_modifiers = package_modifiers or [] + target_modifiers = target_modifiers or [] + + # Convert JSONs back to TaggedModifiers + package_modifiers = [json_to_tagged_modifiers(modifier_json) for modifier_json in package_modifiers] + + # Filter PACKAGE modifiers based on rule name. + # This only filters out PACKAGE modifiers from `extra_cfg_modifiers_per_rule` argument of `set_cfg_modifiers` function. + package_modifiers = [tagged_modifiers for tagged_modifiers in package_modifiers if tagged_modifiers.rule_name == None or tagged_modifiers.rule_name == rule_name] + merged_modifiers = package_modifiers + + # Add target modifiers as `TaggedModifiers` + if target_modifiers: + merged_modifiers.append(TaggedModifiers(modifiers = target_modifiers, location = ModifierTargetLocation(), rule_name = None)) + + # Resolve all aliases in CLI modifiers + cli_modifiers = [resolved_modifier for modifier in cli_modifiers for resolved_modifier in resolve_alias(modifier, aliases)] + + # Convert CLI modifiers to `TaggedModifier` + if cli_modifiers: + merged_modifiers.append(TaggedModifiers(modifiers = cli_modifiers, location = ModifierCliLocation(), rule_name = None)) + + refs = [] + for tagged_modifiers in merged_modifiers: + for modifier in tagged_modifiers.modifiers: + refs.extend(modifier_to_refs(modifier, tagged_modifiers.location)) + + return refs, PostConstraintAnalysisParams( + legacy_platform = legacy_platform, + merged_modifiers = merged_modifiers, + extra_data = extra_data, + ) + +def cfg_constructor_post_constraint_analysis( + *, + refs: dict[str, ProviderCollection], + params: PostConstraintAnalysisParams) -> PlatformInfo: + """ + Second stage of cfg constructor for modifiers. + + Args: + refs: a dictionary of fully qualified target labels for configuration targets with their providers + params: `PostConstraintAnalysisParams` returned from first stage of cfg constructor + + Returns a PlatformInfo + """ + + if not params.merged_modifiers: + # If there is no modifier and legacy platform is specified, + # then return the legacy platform as is without changing the label or + # configuration. + return params.legacy_platform or PlatformInfo( + # Empty configuration + label = "", + configuration = ConfigurationInfo( + constraints = {}, + values = {}, + ), + ) + + constraint_setting_to_modifier_infos = {} + cli_modifier_validation = getattr(params.extra_data, "cli_modifier_validation", None) + + if params.legacy_platform: + for constraint_setting, constraint_value_info in params.legacy_platform.configuration.constraints.items(): + constraint_setting_to_modifier_infos[constraint_setting] = [constraint_value_info] + + for tagged_modifiers in params.merged_modifiers: + for modifier in tagged_modifiers.modifiers: + if modifier: + constraint_setting_label, modifier_info = get_modifier_info( + refs = refs, + modifier = modifier, + location = tagged_modifiers.location, + ) + modifier_infos = constraint_setting_to_modifier_infos.get(constraint_setting_label) or [] + modifier_infos.append(modifier_info) + constraint_setting_to_modifier_infos[constraint_setting_label] = modifier_infos + + if isinstance(tagged_modifiers.location, ModifierCliLocation): + if cli_modifier_validation: + cli_modifier_validation(constraint_setting_label, modifier) + + # Modifiers are resolved in topological ordering of modifier selects. For example, if the CPU modifier + # is a modifier_select on OS constraint, then the OS modifier must be resolved before the CPU modifier. 
+    # To determine this order, we first construct a dep graph of constraint settings based on the modifier
+    # selects. Then we perform a post-order traversal of that graph.
+    modifier_dep_graph = {
+        constraint_setting: [
+            dep
+            for modifier_info in modifier_infos
+            for dep in get_constraint_setting_deps(modifier_info)
+        ]
+        for constraint_setting, modifier_infos in constraint_setting_to_modifier_infos.items()
+    }
+
+    # For topo-sort, we need to fill in empty edges for nodes that have no deps
+    for deps in modifier_dep_graph.values():
+        for dep in deps:
+            if dep not in modifier_dep_graph:
+                modifier_dep_graph[dep] = []
+
+    constraint_setting_order = post_order_traversal(modifier_dep_graph)
+
+    cfg = ConfigurationInfo(
+        constraints = {},
+        values = {},
+    )
+
+    for constraint_setting in constraint_setting_order:
+        for modifier_info in constraint_setting_to_modifier_infos.get(constraint_setting) or ():
+            constraint_value = resolve_modifier(cfg, modifier_info)
+            if constraint_value:
+                cfg.constraints[constraint_setting] = constraint_value
+
+    name = cfg_name(cfg)
+    return PlatformInfo(
+        label = name,
+        configuration = cfg,
+    )
diff --git a/prelude/cfg/modifier/common.bzl b/prelude/cfg/modifier/common.bzl
new file mode 100644
index 00000000000..1d3b1f7a261
--- /dev/null
+++ b/prelude/cfg/modifier/common.bzl
@@ -0,0 +1,222 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+#
+# This source code is licensed under both the MIT license found in the
+# LICENSE-MIT file in the root directory of this source tree and the Apache
+# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
+# of this source tree.
+
+load("@prelude//:asserts.bzl", "asserts")
+load(":asserts.bzl", "verify_normalized_modifier")
+load(
+    ":types.bzl",
+    "ConditionalModifierInfo",
+    "Modifier",
+    "ModifierCliLocation",
+    "ModifierInfo",
+    "ModifierLocation",
+    "ModifierPackageLocation",
+    "ModifierTargetLocation",
+    "ModifiersMatchInfo",
+    "TaggedModifiers",
+    "is_modifiers_match",
+)
+
+MODIFIER_METADATA_KEY = "buck.cfg_modifiers"
+
+_TARGET_LOCATION_STR = "`metadata` attribute of target"
+_CLI_LOCATION_STR = "command line"
+
+def location_to_string(location: ModifierLocation) -> str:
+    if isinstance(location, ModifierPackageLocation):
+        return location.package_path
+    if isinstance(location, ModifierTargetLocation):
+        return _TARGET_LOCATION_STR
+    if isinstance(location, ModifierCliLocation):
+        return _CLI_LOCATION_STR
+    fail("Internal error. Unrecognized location type `{}` for location `{}`".format(type(location), location))
+
+def get_tagged_modifiers(
+        cfg_modifiers: list[Modifier],
+        extra_cfg_modifiers_per_rule: dict[str, list[Modifier]],
+        location: ModifierLocation) -> list[TaggedModifiers]:
+    for modifier in cfg_modifiers:
+        verify_normalized_modifier(modifier)
+    for _, modifiers in extra_cfg_modifiers_per_rule.items():
+        for modifier in modifiers:
+            verify_normalized_modifier(modifier)
+
+    # Aggregate all tagged modifiers in a PACKAGE into a single list.
+    # Per-rule modifiers come after the global modifiers so that they are processed later.
+ return [ + TaggedModifiers( + modifiers = cfg_modifiers, + location = location, + rule_name = None, + ), + ] + [ + TaggedModifiers( + modifiers = modifiers, + location = location, + rule_name = rule_name, + ) + for rule_name, modifiers in extra_cfg_modifiers_per_rule.items() + ] + +def get_constraint_setting(constraint_settings: dict[TargetLabel, None], modifier: Modifier, location: ModifierLocation) -> TargetLabel: + if len(constraint_settings) == 0: + fail("`modifiers.match` cannot be empty. Found empty `modifiers.match` at `{}`".format(location_to_string(location))) + if len(constraint_settings) > 1: + fail( + "A single modifier can only modify a single constraint setting.\n" + + "Modifier `{}` from `{}` is found to modify the following constraint settings:\n".format( + modifier, + location_to_string(location), + ) + "\n".join([str(k) for k in constraint_settings.keys()]), + ) + return list(constraint_settings.keys())[0] + +def get_modifier_info( + refs: dict[str, ProviderCollection], + modifier: Modifier, + location: ModifierLocation) -> ((TargetLabel, ModifierInfo) | None): + # Gets a modifier info from a modifier based on providers from `refs`. + if modifier == None: + return None + if is_modifiers_match(modifier): + default = None + modifiers_match_info = [] + constraint_settings = {} # Used like a set + for key, sub_modifier in modifier.items(): + if key == "DEFAULT": + if sub_modifier: + default_constraint_setting, default = get_modifier_info(refs, sub_modifier, location) + constraint_settings[default_constraint_setting] = None + else: + default = None + elif key != "_type": + cfg_info = refs[key][ConfigurationInfo] + if sub_modifier: + sub_constraint_setting, sub_modifier_info = get_modifier_info(refs, sub_modifier, location) + constraint_settings[sub_constraint_setting] = None + else: + sub_modifier_info = None + modifiers_match_info.append((cfg_info, sub_modifier_info)) + + constraint_setting = get_constraint_setting(constraint_settings, modifier, location) + + return constraint_setting, ModifiersMatchInfo( + default = default, + selector = modifiers_match_info, + ) + if isinstance(modifier, str): + modifier_info = refs[modifier] + if ConditionalModifierInfo in modifier_info: + conditional_modifier_info = modifier_info[ConditionalModifierInfo] + return conditional_modifier_info.key, conditional_modifier_info.inner + cfg_info = modifier_info[ConfigurationInfo] + asserts.true(len(cfg_info.constraints) == 1, "Modifier should only be a single constraint value. 
Found multiple in `{}`".format(modifier)) + constraint_value_info = list(cfg_info.constraints.values())[0] + return constraint_value_info.setting.label, constraint_value_info + fail("Internal error: Found unexpected modifier `{}` type `{}`".format(modifier, type(modifier))) + +def _is_subset(a: ConfigurationInfo, b: ConfigurationInfo) -> bool: + for (constraint_setting, a_constraint_value) in a.constraints.items(): + b_constraint_value = b.constraints.get(constraint_setting) + if a_constraint_value != b_constraint_value: + return False + return True + +def resolve_modifier(cfg: ConfigurationInfo, modifier: ModifierInfo) -> ConstraintValueInfo | None: + # Resolve the modifier and return the constraint value to add to the configuration, if there is one + if modifier == None: + return None + if isinstance(modifier, ModifiersMatchInfo): + for key, sub_modifier in modifier.selector: + if _is_subset(key, cfg): + # If constraints in key of the select are a subset of the constraints in the + # current configuration, then it's a match. + return resolve_modifier(cfg, sub_modifier) + if modifier.default: + return resolve_modifier(cfg, modifier.default) + return None + if isinstance(modifier, ConstraintValueInfo): + return modifier + fail("Internal error: Found unexpected modifier `{}` type `{}`".format(modifier, type(modifier))) + +def modifier_to_refs(modifier: Modifier, location: ModifierLocation) -> list[str]: + # Obtain a list of targets to analyze from a modifier. + refs = [] + if modifier == None: + pass + elif is_modifiers_match(modifier): + for key, sub_modifier in modifier.items(): + if key != "_type": + if key != "DEFAULT": + refs.append(key) + refs.extend(modifier_to_refs(sub_modifier, location)) + elif isinstance(modifier, str): + refs.append(modifier) + else: + fail("Internal error: Found unexpected modifier `{}` type `{}`".format(modifier, type(modifier))) + return refs + +def tagged_modifiers_to_json(tagged_modifiers: TaggedModifiers) -> dict[str, typing.Any]: + return { + "location": _location_to_json(tagged_modifiers.location), + "modifiers": tagged_modifiers.modifiers, + "rule_name": tagged_modifiers.rule_name, + "_type": "TaggedModifiers", + } + +def _location_to_json(location: ModifierLocation) -> dict[str, str]: + if isinstance(location, ModifierPackageLocation): + return {"package_path": location.package_path, "_type": "ModifierPackageLocation"} + if isinstance(location, ModifierTargetLocation): + return {"_type": "ModifierTargetLocation"} + fail("Internal error: unknown location `{}` with type `{}`".format(location, type(location))) + +def json_to_tagged_modifiers(j: dict[str, typing.Any]) -> TaggedModifiers: + if j["_type"] != "TaggedModifiers": + fail("Internal error: `{}` is not a `TaggedModifiers`".format(j)) + return TaggedModifiers( + location = _json_to_location(j["location"]), + modifiers = j["modifiers"], + rule_name = j["rule_name"], + ) + +def _json_to_location(j: dict[str, str]) -> ModifierLocation: + modifier_type = j.pop("_type") + if modifier_type == "ModifierPackageLocation": + return ModifierPackageLocation(package_path = j["package_path"]) + if modifier_type == "ModifierTargetLocation": + return ModifierTargetLocation() + fail("Internal error: cannot deserialize location `{}`".format(j)) + +def resolve_alias(modifier: Modifier, aliases: struct) -> list[Modifier]: + if isinstance(modifier, ModifiersMatchInfo): + fail("It should not be possible to specify a conditional modifier from command line") + if ":" in modifier: + # This is a target and not an alias + 
return [modifier] + resolved = getattr(aliases, modifier, None) + if resolved: + return resolved if isinstance(resolved, list) else [resolved] + fail("Found invalid modifier alias `{}`. A list of valid modifier aliases is in buck2/cfg/experimental/alias.bzl".format(modifier)) + +def _get_constraint_setting_deps( + modifier_info: ModifierInfo) -> list[TargetLabel]: + deps = [] + if isinstance(modifier_info, ModifiersMatchInfo): + for key, sub_modifier in modifier_info.selector: + for constraint_setting in key.constraints: + deps.append(constraint_setting) + deps += _get_constraint_setting_deps(sub_modifier) + if modifier_info.default: + deps += _get_constraint_setting_deps(modifier_info.default) + return deps + +def get_constraint_setting_deps( + modifier_info: ModifierInfo) -> list[TargetLabel]: + # Get all constraint settings depended on by a modifier (from keys of `modifier_select`). The modifiers + # for these constraint settings must be resolved before this modifier can be resolved. + return dedupe(_get_constraint_setting_deps(modifier_info)) diff --git a/prelude/cfg/modifier/name.bzl b/prelude/cfg/modifier/name.bzl new file mode 100644 index 00000000000..79c1fb59ae8 --- /dev/null +++ b/prelude/cfg/modifier/name.bzl @@ -0,0 +1,55 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +# This is an ordered dictionary of constraint setting target to an optional transform. +# The constraint settings listed below are used to name the configuration, where the first +# constraint settings are named first in the configuration. The transform, if specified, +# can change how the name appears in the configuration. By default, if there is no transform, +# we just use name of the constraint value in the configuration name. If transform is +# specified, the transform will be applied on the existing constraint value name to return a +# new string to be used in the configuration. +# @unsorted-dict-items +NAMED_CONSTRAINT_SETTINGS = { + # TODO(scottcao): Add OSS constraints as well + "ovr_config//os/constraints:os": None, + "ovr_config//cpu/constraints:cpu": None, + "ovr_config//runtime/constraints:runtime": None, + "ovr_config//runtime/constraints:runtime_version": None, + "ovr_config//os/sdk/apple/constraints:_": None, + "ovr_config//os/sdk/android/ndk/constraints:version": None, + "ovr_config//os/version/android/constraints:api-level": (lambda label: "api" + str(label.name).split("-")[-1]), + "ovr_config//toolchain/clang/constraints:clang-toolchain-version": (lambda label: "clang" + str(label.name)), + "ovr_config//build_mode/constraints:san": None, + "fbcode//fdo/constraints:fdo": (lambda label: str(label.name)), + "ovr_config//build_mode/default_opt_cxx:default_opt_cxx_setting": (lambda label: "opt-by-default" if str(label.name) == "enabled" else None), +} + +# Mark all modifier generated configurations with a `cfg:` prefix. +# We do this so that we can easily recognize which configuration is generated +# by modifiers and query for it in Scuba. 
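Concretely, given the table above and the `cfg:` prefix convention implemented by the constants and `cfg_name` function just below, configurations would be rendered roughly as follows (constraint values hypothetical):

# {os: linux, cpu: x86_64}  ->  "cfg:linux-x86_64"
# {}                        ->  "cfg:"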
+_CFG_PREFIX = "cfg:" +_EMPTY_CFG_NAME = _CFG_PREFIX + "" + +def cfg_name(cfg: ConfigurationInfo) -> str: + """Derives a reasonable name for a ConfigurationInfo""" + + name_list = [] + constraints = {str(key): value for key, value in cfg.constraints.items()} + for constraint_setting, transform in NAMED_CONSTRAINT_SETTINGS.items(): + if constraint_setting in constraints: + constraint_value_label = constraints[constraint_setting].label + if transform: + constraint_name = transform(constraint_value_label) + else: + constraint_name = str(constraint_value_label.name) + if constraint_name: + name_list.append(constraint_name) + if len(name_list) == 0: + name = _EMPTY_CFG_NAME + else: + name = _CFG_PREFIX + "-".join(name_list) + return name diff --git a/prelude/cfg/modifier/set_cfg_modifiers.bzl b/prelude/cfg/modifier/set_cfg_modifiers.bzl new file mode 100644 index 00000000000..4c6dcf3f07d --- /dev/null +++ b/prelude/cfg/modifier/set_cfg_modifiers.bzl @@ -0,0 +1,73 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load("@prelude//:prelude.bzl", "native") +load(":common.bzl", "MODIFIER_METADATA_KEY", "get_tagged_modifiers", "tagged_modifiers_to_json") +load(":types.bzl", "Modifier", "ModifierPackageLocation") # @unused Used in type annotation + +def set_cfg_modifiers( + cfg_modifiers: list[Modifier] | None = None, + extra_cfg_modifiers_per_rule: dict[str, list[Modifier]] | None = None): + """ + Sets a configuration modifier for all targets under this PACKAGE file. This can only be called from a PACKAGE file context + (e.g. a PACKAGE file or a bzl file transitively loaded by a PACKAGE file). + + Args: + cfg_modifiers: + A list of modifiers to set. The simplest modifier is a constraint value target. + For example, to change the OS to linux in fbsource, this can be specified as `["ovr_config//os/constraints:linux"]`. + extra_cfg_modifiers_per_rule: + A dictionary of rule name to a list of modifiers to set. This is applied on top of modifiers from `cfg_modifiers` parameter + if a target's rule name matches the key, so it can override any modifier from `cfg_modifiers` parameter in the same PACKAGE. + For example, if this dictionary is `{"python_binary": ["ovr_config//os/constraints:macos"]}`, + then all python_binary targets covered will have the macos constraint added to their configurations. + """ + + # Make this buck1-proof + call_stack_frame = getattr(native, "call_stack_frame", None) + + # To ensure that modifiers set in PACKAGE files are easily codemoddable + # We want to enforce that `set_cfg_modifiers` is only invokable from a PACKAGE file and not a bzl file + module_path = call_stack_frame(1).module_path + if not module_path.endswith(("/PACKAGE", "/BUCK_TREE")) and module_path not in ("PACKAGE", "BUCK_TREE"): + fail("set_cfg_modifiers is only allowed to be used from PACKAGE files, not a bzl file") + + cfg_modifiers = cfg_modifiers or [] + extra_cfg_modifiers_per_rule = extra_cfg_modifiers_per_rule or {} + + # Make this buck1-proof + write_package_value = getattr(native, "write_package_value", None) + read_parent_package_value = getattr(native, "read_parent_package_value", None) + + merged_modifier_jsons = read_parent_package_value(MODIFIER_METADATA_KEY) + + # `read_parent_package_value` returns immutable values. 
`list()` makes it mutable. + merged_modifier_jsons = list(merged_modifier_jsons) if merged_modifier_jsons else [] + + tagged_modifiers_list = get_tagged_modifiers( + cfg_modifiers, + extra_cfg_modifiers_per_rule, + ModifierPackageLocation(package_path = _get_package_path()), + ) + merged_modifier_jsons += [tagged_modifiers_to_json(tagged_modifiers) for tagged_modifiers in tagged_modifiers_list] + + write_package_value( + MODIFIER_METADATA_KEY, + merged_modifier_jsons, + overwrite = True, + ) + +def _get_package_path() -> str: + """ + Returns the cell-relative path of the current PACKAGE file. + Ex. `foo//bar/PACKAGE` + """ + + # Make this buck1-proof + get_cell_name = getattr(native, "get_cell_name", None) + get_base_path = getattr(native, "get_base_path", None) + return "{}//{}/PACKAGE".format(get_cell_name(), get_base_path()) diff --git a/prelude/cfg/modifier/types.bzl b/prelude/cfg/modifier/types.bzl new file mode 100644 index 00000000000..f5b7e8a846e --- /dev/null +++ b/prelude/cfg/modifier/types.bzl @@ -0,0 +1,64 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +# TODO(scottcao): Annotate these types with comments once implementation is complete + +# Metadata types for where cfg modifier is defined. We need to track this to give users error messages +# that include where the bad modifier comes from. + +# Modifier defined in a PACKAGE file. We track path of that PACKAGE file. +ModifierPackageLocation = record(package_path = str) + +# Modifier defined on the target in buildfile. +ModifierTargetLocation = record() + +# Modifier specified via command line from the user +ModifierCliLocation = record() + +# This is a handy way of specifying a rust-style enum in Starlark. +ModifierLocation = ModifierPackageLocation | ModifierTargetLocation | ModifierCliLocation + +# Modifier types as how they appear to the user via `set_cfg_modifier` or `cfg_modifier` function. + +ModifiersMatch = dict[str, typing.Any] + +Modifier = str | ModifiersMatch | None + +TaggedModifiers = record( + modifiers = list[Modifier], + location = ModifierLocation, + rule_name = str | None, +) + +# Modifier types after analysis of configuration rules. +# There is an equivalent post-constraint-analysis type for every modifier type listed above. +# An "Info" is added to the type name to denote post-constraint-analysis version of the +# modifier type. + +ModifiersMatchInfo = record( + # should be list[(ConfigurationInfo, "ModifierInfo")] once recursive types are supported + selector = list[(ConfigurationInfo, typing.Any)], + default = typing.Any, # should be "ModifierInfo" | None with recursive types +) + +ModifierInfo = ConstraintValueInfo | ModifiersMatchInfo | None + +# A provider for conditional modifier used by cfg constructor function when constructing the +# configuration +ConditionalModifierInfo = provider(fields = { + "inner": ModifierInfo, + "key": TargetLabel, +}) + +def is_modifiers_match(modifier: Modifier) -> bool: + if modifier == None or isinstance(modifier, str): + return False + if isinstance(modifier, dict): + if modifier["_type"] != "ModifiersMatch": + fail("Found unknown dictionary `{}` for modifier".format(modifier)) + return True + fail("Modifier should either be None, a string, or dict. 
Found `{}`".format(modifier)) diff --git a/prelude/command_alias.bzl b/prelude/command_alias.bzl index 1dcc7ea9ba9..b1ba3f70a12 100644 --- a/prelude/command_alias.bzl +++ b/prelude/command_alias.bzl @@ -58,15 +58,18 @@ def _command_alias_impl_target_unix(ctx, exec_is_windows: bool): exec_is_windows, ) - run_info_args = cmd_args() + run_info_args_args = [] + run_info_args_hidden = [] if len(ctx.attrs.env) > 0 or len(ctx.attrs.platform_exe.items()) > 0: - run_info_args.add(trampoline) - run_info_args.hidden(trampoline_args) + run_info_args_args.append(trampoline) + run_info_args_hidden.append(trampoline_args) else: - run_info_args.add(base.args) - run_info_args.add(ctx.attrs.args) + run_info_args_args.append(base.args) + run_info_args_args.append(ctx.attrs.args) - run_info_args.hidden(ctx.attrs.resources) + run_info_args_hidden.append(ctx.attrs.resources) + + run_info_args = cmd_args(run_info_args_args, hidden = run_info_args_hidden) return [ DefaultInfo(default_output = trampoline, other_outputs = [trampoline_args] + ctx.attrs.resources), @@ -86,6 +89,12 @@ def _command_alias_impl_target_windows(ctx, exec_is_windows: bool): trampoline_args = cmd_args() trampoline_args.add("@echo off") + if "close_stdin" in ctx.attrs.labels: + # Avoids waiting for input on the "Terminate batch job (Y/N)?" prompt. + # The prompt itself is unavoidable, but we can avoid having to wait for input. + # This will call the same trampoline batch file with stdin disabled + trampoline_args.add("if not defined STDIN_CLOSED (set STDIN_CLOSED=1 & CALL 0: - run_info_args.add(trampoline) - run_info_args.hidden(trampoline_args) + run_info_args_args.append(trampoline) + run_info_args_hidden.append(trampoline_args) else: - run_info_args.add(base.args) - run_info_args.add(ctx.attrs.args) + run_info_args_args.append(base.args) + run_info_args_args.append(ctx.attrs.args) - run_info_args.hidden(ctx.attrs.resources) + run_info_args_hidden.append(ctx.attrs.resources) + + run_info_args = cmd_args(run_info_args_args, hidden = run_info_args_hidden) return [ DefaultInfo(default_output = trampoline, other_outputs = [trampoline_args] + ctx.attrs.resources), @@ -148,7 +161,11 @@ def _relativize_path_unix( trampoline_args: cmd_args) -> Artifact: # FIXME(ndmitchell): more straightforward relativization with better API non_materialized_reference = ctx.actions.write("dummy", "") - trampoline_args.relative_to(non_materialized_reference, parent = 1).absolute_prefix("__BUCK_COMMAND_ALIAS_ABSOLUTE__/") + trampoline_args = cmd_args( + trampoline_args, + relative_to = (non_materialized_reference, 1), + absolute_prefix = "__BUCK_COMMAND_ALIAS_ABSOLUTE__/", + ) trampoline_tmp, _ = ctx.actions.write("__command_alias_trampoline.{}.pre".format(extension), trampoline_args, allow_args = True) @@ -178,7 +195,11 @@ def _relativize_path_windows( trampoline_args: cmd_args) -> Artifact: # FIXME(ndmitchell): more straightforward relativization with better API non_materialized_reference = ctx.actions.write("dummy", "") - trampoline_args.relative_to(non_materialized_reference, parent = 1).absolute_prefix(var + "/") + trampoline_args = cmd_args( + trampoline_args, + relative_to = (non_materialized_reference, 1), + absolute_prefix = var + "/", + ) trampoline, _ = ctx.actions.write("__command_alias_trampoline.{}".format(extension), trampoline_args, allow_args = True) @@ -209,7 +230,10 @@ def _add_args_declaration_to_trampoline_args(trampoline_args: cmd_args, base: Ru trampoline_args.add(")") -def _get_run_info_from_exe(exe: Dependency) -> RunInfo: +def 
_get_run_info_from_exe(exe: Dependency | Artifact) -> RunInfo: + if isinstance(exe, Artifact): + return RunInfo(args = cmd_args(exe)) + run_info = exe.get(RunInfo) if run_info == None: run_info = RunInfo( diff --git a/prelude/configurations/rules.bzl b/prelude/configurations/rules.bzl index 66536b8333b..9e80cd67199 100644 --- a/prelude/configurations/rules.bzl +++ b/prelude/configurations/rules.bzl @@ -5,6 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +load("@prelude//cfg/modifier:types.bzl", "ConditionalModifierInfo") load(":util.bzl", "util") # config_setting() accepts a list of constraint_values and a list of values @@ -38,9 +39,16 @@ def constraint_value_impl(ctx): DefaultInfo(), constraint_value, # Provide `ConfigurationInfo` from `constraint_value` so it could be used as select key. - ConfigurationInfo(constraints = { - constraint_value.setting.label: constraint_value, - }, values = {}), + ConfigurationInfo( + constraints = { + constraint_value.setting.label: constraint_value, + }, + values = {}, + ), + ConditionalModifierInfo( + inner = constraint_value, + key = constraint_value.setting.label, + ), ] # platform() declares a platform, it is a list of constraint values. diff --git a/prelude/cpu/BUCK.v2 b/prelude/cpu/BUCK.v2 index 30d019bfd98..ec7c7b4ed3d 100644 --- a/prelude/cpu/BUCK.v2 +++ b/prelude/cpu/BUCK.v2 @@ -1,3 +1,9 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + config_setting( name = "x86_64", constraint_values = [ diff --git a/prelude/cpu/constraints/BUCK.v2 b/prelude/cpu/constraints/BUCK.v2 index 6a0e5dd00e1..1c7ba33a070 100644 --- a/prelude/cpu/constraints/BUCK.v2 +++ b/prelude/cpu/constraints/BUCK.v2 @@ -1,3 +1,9 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + # Used by open source projects to support `prelude//` constraint_setting( @@ -24,6 +30,12 @@ constraint_value( visibility = ["PUBLIC"], ) +constraint_value( + name = "arm64_32", + constraint_setting = ":cpu", + visibility = ["PUBLIC"], +) + # Represents various flavors of ARM32, e.g., ARMv7k constraint_value( name = "arm32", diff --git a/prelude/csharp/csharp.bzl b/prelude/csharp/csharp.bzl index 7651b5763c1..53ddfb7c384 100644 --- a/prelude/csharp/csharp.bzl +++ b/prelude/csharp/csharp.bzl @@ -19,14 +19,14 @@ def csharp_library_impl(ctx: AnalysisContext) -> list[Provider]: library = ctx.actions.declare_output(dll_name) # Create a command invoking a wrapper script that calls csc.exe to compile the .dll. - cmd = cmd_args(toolchain.csc) + cmd = [toolchain.csc] # Add caller specified compiler flags. - cmd.add(ctx.attrs.compiler_flags) + cmd.append(ctx.attrs.compiler_flags) # Set the output target as a .NET library. - cmd.add("/target:library") - cmd.add(cmd_args( + cmd.append("/target:library") + cmd.append(cmd_args( library.as_output(), format = "/out:{}", )) @@ -34,29 +34,29 @@ def csharp_library_impl(ctx: AnalysisContext) -> list[Provider]: # Don't include any default .NET framework assemblies like "mscorlib" or "System" unless # explicitly requested with `/reference:{}`. This flag also stops injection of other # default compiler flags. - cmd.add("/noconfig") + cmd.append("/noconfig") # Don't reference mscorlib.dll unless asked for. This is required for targets that target # embedded platforms such as Silverlight or WASM. (Originally for Buck1 compatibility.) 
- cmd.add("/nostdlib") + cmd.append("/nostdlib") # Don't search any paths for .NET libraries unless explicitly referenced with `/lib:{}`. - cmd.add("/nosdkpath") + cmd.append("/nosdkpath") # Let csc know the directory path where it can find system assemblies. This is the path # that is searched by `/reference:{libname}` if `libname` is just a DLL name. - cmd.add(cmd_args(toolchain.framework_dirs[ctx.attrs.framework_ver], format = "/lib:{}")) + cmd.append(cmd_args(toolchain.framework_dirs[ctx.attrs.framework_ver], format = "/lib:{}")) # Add a `/reference:{name}` argument for each dependency. # Buck target refs should be absolute paths and system assemblies just the DLL name. child_deps = generate_target_tset_children(ctx.attrs.deps, ctx) deps_tset = ctx.actions.tset(DllDepTSet, children = child_deps) - cmd.add(deps_tset.project_as_args("reference")) + cmd.append(deps_tset.project_as_args("reference")) # Specify the C# source code files that should be compiled into this target. # NOTE: This must happen after /out and /target! - cmd.add(ctx.attrs.srcs) + cmd.append(ctx.attrs.srcs) # Run the C# compiler to produce the output artifact. ctx.actions.run(cmd, category = "csharp_compile") diff --git a/prelude/cxx/anon_link.bzl b/prelude/cxx/anon_link.bzl index 9da24b7ebb6..143f892eae1 100644 --- a/prelude/cxx/anon_link.bzl +++ b/prelude/cxx/anon_link.bzl @@ -10,7 +10,11 @@ load( "ArtifactInfo", "make_artifact_tset", ) -load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo") +load( + "@prelude//cxx:cxx_toolchain_types.bzl", + "CxxToolchainInfo", + "LinkerType", +) load("@prelude//cxx:cxx_utility.bzl", "cxx_attrs_get_allow_cache_upload") load("@prelude//linking:execution_preference.bzl", "LinkExecutionPreference") load( @@ -34,7 +38,7 @@ def _serialize_linkable(linkable): return ("archive", ( (linkable.archive.artifact, linkable.archive.external_objects), linkable.link_whole, - linkable.linker_type, + linkable.linker_type.value, linkable.supports_lto, )) @@ -42,7 +46,7 @@ def _serialize_linkable(linkable): return ("objects", ( linkable.objects, linkable.link_whole, - linkable.linker_type, + linkable.linker_type.value, )) if isinstance(linkable, SharedLibLinkable): @@ -107,7 +111,7 @@ def _deserialize_linkable(linkable: (str, typing.Any)) -> typing.Any: external_objects = external_objects, ), link_whole = link_whole, - linker_type = linker_type, + linker_type = LinkerType(linker_type), supports_lto = supports_lto, ) @@ -116,7 +120,7 @@ def _deserialize_linkable(linkable: (str, typing.Any)) -> typing.Any: return ObjectsLinkable( objects = objects, link_whole = link_whole, - linker_type = linker_type, + linker_type = LinkerType(linker_type), ) if typ == "shared": @@ -138,7 +142,7 @@ def _deserialize_link_info(actions: AnalysisActions, label: Label, info) -> Link external_debug_info = make_artifact_tset( actions = actions, infos = [ - ArtifactInfo(label = label, artifacts = artifacts) + ArtifactInfo(label = label, artifacts = artifacts, tags = []) for _label, artifacts in external_debug_info ], ), @@ -207,7 +211,7 @@ ANON_ATTRS = { # ObjectsLinkable attrs.list(attrs.source()), # objects attrs.bool(), # link_whole - attrs.string(), # linker_type + attrs.enum(LinkerType.values()), # linker_type ), attrs.tuple( # ArchiveLinkable @@ -217,7 +221,7 @@ ANON_ATTRS = { attrs.list(attrs.source()), # external_objects ), attrs.bool(), # link_whole - attrs.string(), # linker_type + attrs.enum(LinkerType.values()), # linker_type attrs.bool(), # supports_lto ), attrs.tuple( diff --git 
a/prelude/cxx/archive.bzl b/prelude/cxx/archive.bzl index e594162957f..f3149df7c65 100644 --- a/prelude/cxx/archive.bzl +++ b/prelude/cxx/archive.bzl @@ -5,15 +5,17 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -load("@prelude//cxx:cxx_toolchain_types.bzl", "LinkerInfo") +load("@prelude//cxx:cxx_toolchain_types.bzl", "LinkerInfo", "LinkerType") load("@prelude//linking:link_info.bzl", "Archive") +load("@prelude//utils:argfile.bzl", "at_argfile") load("@prelude//utils:utils.bzl", "value_or") load(":cxx_context.bzl", "get_cxx_toolchain_info") def _archive_flags( archiver_type: str, - linker_type: str, + linker_type: LinkerType, use_archiver_flags: bool, + symbol_table: bool, thin: bool) -> list[str]: if not use_archiver_flags: return [] @@ -33,15 +35,15 @@ def _archive_flags( # Suppress warning about creating a new archive. flags += "c" - # Run ranlib to generate symbol index for faster linking. - flags += "s" + # Run ranlib to generate symbol index for faster linking if requested. + flags += "s" if symbol_table else "S" # Generate thin archives. if thin: flags += "T" # GNU archivers support generating deterministic archives. - if linker_type == "gnu": + if linker_type == LinkerType("gnu"): flags += "D" return [flags] @@ -56,6 +58,7 @@ def _archive(ctx: AnalysisContext, name: str, args: cmd_args, thin: bool, prefer archiver_type, toolchain.linker_info.type, toolchain.linker_info.use_archiver_flags, + toolchain.linker_info.archive_symbol_table, thin, )) if archiver_type == "windows" or archiver_type == "windows_clang": @@ -67,16 +70,34 @@ def _archive(ctx: AnalysisContext, name: str, args: cmd_args, thin: bool, prefer shell_quoted_args = cmd_args(args, quote = "shell") if toolchain.linker_info.use_archiver_flags and toolchain.linker_info.archiver_flags != None: shell_quoted_args.add(toolchain.linker_info.archiver_flags) - argfile, _ = ctx.actions.write(name + ".argsfile", shell_quoted_args, allow_args = True) - command.hidden([shell_quoted_args]) - command.add(cmd_args(["@", argfile], delimiter = "")) + + command.add(at_argfile( + actions = ctx.actions, + name = name + ".cxx_archive_argsfile", + args = shell_quoted_args, + allow_args = True, + )) else: command.add(args) + # By default, the archive header produced by `ar q` embeds the current unix + # timestamp. With the GNU archiver we use `ar qD` (above in _archive_flags) + # to make it produce a deterministic archive by zeroing the timestamp, but + # other archivers do not support such a flag. Some implementations, notably + # Xcode's, instead support zeroing the timestamp by way of an environment + # variable. 
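To make the mode-string composition concrete, a few worked outcomes of `_archive_flags` (a sketch showing only the characters appended in the hunk above; the start of the function is outside this hunk):
```
# Worked examples (sketch) of the archiver mode flags assembled above:
#   LinkerType("gnu"), symbol_table = True,  thin = True   -> mode ends in "csTD"
#   LinkerType("gnu"), symbol_table = False, thin = False  -> mode ends in "cSD"
#   non-GNU linker,    symbol_table = True,  thin = False  -> mode ends in "cs";
#       there is no "D" flag, so determinism instead relies on ZERO_AR_DATE,
#       set in the continuation of this hunk below.
```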
+ env = {"ZERO_AR_DATE": "1"} + category = "archive" if thin: category = "archive_thin" - ctx.actions.run(command, category = category, identifier = name, prefer_local = prefer_local) + ctx.actions.run( + command, + category = category, + identifier = name, + env = env, + prefer_local = prefer_local, + ) return archive_output def _archive_locally(ctx: AnalysisContext, linker_info: LinkerInfo) -> bool: @@ -90,15 +111,14 @@ def make_archive( ctx: AnalysisContext, name: str, objects: list[Artifact], - args: [cmd_args, None] = None) -> Archive: + hidden: list[Artifact] = []) -> Archive: if len(objects) == 0: fail("no objects to archive") - if args == None: - args = cmd_args(objects) - linker_info = get_cxx_toolchain_info(ctx).linker_info thin = linker_info.archive_contents == "thin" + object_args = cmd_args(objects, ignore_artifacts = not linker_info.archiver_reads_inputs) + args = cmd_args(object_args, hidden = hidden) archive = _archive(ctx, name, args, thin = thin, prefer_local = _archive_locally(ctx, linker_info)) # TODO(T110378125): use argsfiles for GNU archiver for long lists of objects. diff --git a/prelude/cxx/argsfiles.bzl b/prelude/cxx/argsfiles.bzl index 81dbdfeea78..b3f86278482 100644 --- a/prelude/cxx/argsfiles.bzl +++ b/prelude/cxx/argsfiles.bzl @@ -6,7 +6,6 @@ # of this source tree. ARGSFILES_SUBTARGET = "argsfiles" -ABS_ARGSFILES_SUBTARGET = "abs-argsfiles" # Information on argsfiles created for compilation. CompileArgsfile = record( @@ -15,7 +14,7 @@ CompileArgsfile = record( # This argsfile as a command form that would use the argsfile (includes dependent inputs). cmd_form = field(cmd_args), # Input args necessary for the argsfile to reference. - input_args = field(list[["artifacts", cmd_args]]), + input_args = field(list[cmd_args]), # Args as written to the argsfile (with shell quoting applied). args = field(cmd_args), # Args aggregated for the argsfile excluding file prefix args (excludes shell quoting). @@ -25,19 +24,19 @@ CompileArgsfile = record( CompileArgsfiles = record( # Relative path argsfiles used for build actions, mapped by extension. relative = field(dict[str, CompileArgsfile], default = {}), - # Absolute path argsfiles used for extra outputs, mapped by extension. - absolute = field(dict[str, CompileArgsfile], default = {}), + # Argsfiles used for Xcode integration, mapped by extension. + xcode = field(dict[str, CompileArgsfile], default = {}), ) def get_argsfiles_output(ctx: AnalysisContext, argsfile_by_ext: dict[str, CompileArgsfile], summary_name: str) -> DefaultInfo: argsfiles = [] - argsfile_names = cmd_args() + argsfile_names = [] dependent_outputs = [] for _, argsfile in argsfile_by_ext.items(): argsfiles.append(argsfile.file) - argsfile_names.add(cmd_args(argsfile.file).ignore_artifacts()) + argsfile_names.append(cmd_args(argsfile.file, ignore_artifacts = True)) dependent_outputs.extend(argsfile.input_args) - argsfiles_summary = ctx.actions.write(summary_name, argsfile_names) + argsfiles_summary = ctx.actions.write(summary_name, cmd_args(argsfile_names)) return DefaultInfo(default_outputs = [argsfiles_summary] + argsfiles, other_outputs = dependent_outputs) diff --git a/prelude/cxx/attr_selection.bzl b/prelude/cxx/attr_selection.bzl index 05e45227743..cfd83383809 100644 --- a/prelude/cxx/attr_selection.bzl +++ b/prelude/cxx/attr_selection.bzl @@ -32,7 +32,7 @@ def cxx_by_language_ext(x: dict[typing.Any, typing.Any], ext: str) -> list[typin # And you can see them in java code, but somehow it works with # this one, which is seem across the repo. 
Find out what's happening. key_compiler = "c_cpp_output" - elif ext in (".cpp", ".cc", ".cxx", ".c++"): + elif ext in (".cpp", ".cc", ".cxx", ".c++", ".bc"): key_pp = "cxx" key_compiler = "cxx_cpp_output" elif ext == ".m": diff --git a/prelude/cxx/bitcode.bzl b/prelude/cxx/bitcode.bzl index 782e8a30b33..ae95ea9d5c9 100644 --- a/prelude/cxx/bitcode.bzl +++ b/prelude/cxx/bitcode.bzl @@ -6,6 +6,7 @@ # of this source tree. load("@prelude//cxx:cxx_toolchain_types.bzl", "LinkerInfo") +load("@prelude//utils:argfile.bzl", "at_argfile") load("@prelude//utils:utils.bzl", "value_or") load(":cxx_context.bzl", "get_cxx_toolchain_info") @@ -35,14 +36,19 @@ def _bundle(ctx: AnalysisContext, name: str, args: cmd_args, prefer_local: bool) bundle_output = ctx.actions.declare_output(name) - argsfile, _ = ctx.actions.write(name + ".argsfile", args, allow_args = True) - - command = cmd_args(argsfile, format = "@{}", delimiter = "").hidden(args) - llvm_cmd = cmd_args(llvm_link) - llvm_cmd.add(command) - llvm_cmd.add("-v") - llvm_cmd.add("-o") - llvm_cmd.add(bundle_output.as_output()) + command = at_argfile( + actions = ctx.actions, + name = name + ".cxx_bitcode_argsfile", + args = args, + allow_args = True, + ) + llvm_cmd = cmd_args( + llvm_link, + command, + "-v", + "-o", + bundle_output.as_output(), + ) ctx.actions.run(llvm_cmd, category = "bitcode_bundle", identifier = name, prefer_local = prefer_local) return bundle_output @@ -70,7 +76,7 @@ def make_bitcode_bundle( if override and len(objects) > 1: args.add(objects[0]) overrides = cmd_args(objects[1:], format = "--override={}") - args.add(overrides).hidden(objects) + args.add(overrides) else: args.add(objects) diff --git a/prelude/cxx/comp_db.bzl b/prelude/cxx/comp_db.bzl index 449c80657f2..c0d81512929 100644 --- a/prelude/cxx/comp_db.bzl +++ b/prelude/cxx/comp_db.bzl @@ -7,6 +7,7 @@ load("@prelude//:paths.bzl", "paths") load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxPlatformInfo", "CxxToolchainInfo") +load("@prelude//utils:argfile.bzl", "at_argfile") load( ":compile.bzl", "CxxSrcCompileCommand", # @unused Used as a type @@ -15,7 +16,6 @@ load(":cxx_context.bzl", "get_cxx_toolchain_info") # Provider that exposes the compilation database information CxxCompilationDbInfo = provider(fields = { - "compdb": provider_field(typing.Any, default = None), # path customly built compile_commands.json (used by Zephyr projects) "info": provider_field(typing.Any, default = None), # A map of the file (an `Artifact`) to its corresponding `CxxSrcCompileCommand` "platform": provider_field(typing.Any, default = None), # platform for this compilation database "toolchain": provider_field(typing.Any, default = None), # toolchain for this compilation database @@ -32,10 +32,7 @@ def create_compilation_database( ctx: AnalysisContext, src_compile_cmds: list[CxxSrcCompileCommand], identifier: str) -> DefaultInfo: - mk_comp_db = get_cxx_toolchain_info(ctx).mk_comp_db - if mk_comp_db == None: - return DefaultInfo() - mk_comp_db = mk_comp_db[RunInfo] + mk_comp_db = get_cxx_toolchain_info(ctx).internal_tools.make_comp_db # Generate the per-source compilation DB entries. 
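The archive, bitcode, and comp_db hunks all migrate to at_argfile from @prelude//utils:argfile.bzl, which bundles the write-the-argsfile and keep-inputs-hidden steps that were previously done by hand. A simplified sketch of the behavior (not the real helper):
```
# Simplified sketch of at_argfile: write args to a file, pass "@file" on the
# command line, and keep the original args as hidden inputs so the artifacts
# they reference are still materialized for the action.
def at_argfile_sketch(*, actions, name, args, allow_args = False):
    if allow_args:
        argfile, _ = actions.write(name, args, allow_args = True)
    else:
        argfile = actions.write(name, args)
    return cmd_args(argfile, format = "@{}", hidden = args)
```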
entries = {} @@ -50,7 +47,7 @@ def create_compilation_database( "gen", cmd_args(entry.as_output(), format = "--output={}"), src_compile_cmd.src.basename, - cmd_args(src_compile_cmd.src).parent(), + cmd_args(src_compile_cmd.src, parent = 1), "--", src_compile_cmd.cxx_compile_cmd.base_compile_cmd, src_compile_cmd.cxx_compile_cmd.argsfile.cmd_form, @@ -63,18 +60,17 @@ def create_compilation_database( other_outputs.append(cmd) entries[cdb_path] = entry - content = cmd_args(*entries.values()) - - argfile = ctx.actions.declare_output(paths.join(identifier, "comp_db.argsfile")) - ctx.actions.write(argfile.as_output(), content) - # Merge all entries into the actual compilation DB. db = ctx.actions.declare_output(paths.join(identifier, "compile_commands.json")) cmd = cmd_args(mk_comp_db) cmd.add("merge") cmd.add(cmd_args(db.as_output(), format = "--output={}")) - cmd.add(cmd_args(argfile, format = "@{}")) - cmd.hidden(entries.values()) + cmd.add(at_argfile( + actions = ctx.actions, + name = identifier + ".cxx_comp_db_argsfile", + args = entries.values(), + )) + ctx.actions.run(cmd, category = "cxx_compilation_database_merge", identifier = identifier) return DefaultInfo(default_output = db, other_outputs = other_outputs) diff --git a/prelude/cxx/compilation_database_labels.bzl b/prelude/cxx/compilation_database_labels.bzl new file mode 100644 index 00000000000..ee064331e5e --- /dev/null +++ b/prelude/cxx/compilation_database_labels.bzl @@ -0,0 +1,72 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +""" +Handles labels used to provide compilation database information for filegroup() and genrule() targets. + +Our language services need to know how to compile files owned solely by filegroup() or genrule() targets like: +* Regular generated sources, that then end up being compiled by regular cxx_ or apple_ targets. +* Manually declared mixin files, that are always compiled by multiple other targets spread across the codebase. +* Files built by external build systems wrapped in genrules(), where compile_commands.json is produced by yet another genrule(). + +The prior approach for the former two cases was to run rdeps() queries to find a compilable target that would have a compile_commands.json entry for the file. +It suffered from reliability and performance issues, as the universe for rdeps() queries had to be quite broad, with no guarantee that there wasn't even a single broken target within it. +And for external build system wrappers where there is no compilable target, we could define a rule that would effectively wrap two genrules and expose one of them as a [compilation-database] subtarget, +but that wouldn't solve the problem with mixins, which is still relevant with external build systems, and it would put us in the same suboptimal spot in terms of performance and reliability. + +As the IDE needs to operate in O(changes) instead of O(repo) and open files even if some other corner of the repo is broken, +we need to make things both reliable and performant in an ever-growing codebase with a CI that explicitly cannot guarantee that the entire repo is green, and where rdeps() queries are thus flaky and slow. 
+ + And as the IDE needs to react to any local changes and act consistently with local checkout, we cannot simply use a remote cache for rdeps() queries that are slow and flaky. + + So the solution is instead to localize the required information within the target, and directly point to the build system rules that provide compile_commands.json for the target. + """ + +def compilation_database_rules(source_mapping: dict[str, list[str]] | list[str]) -> list[str]: + """ + Takes a mapping from sources to the rules to be used to build compilation databases for those sources. + + Tooling like IDEs needs to obtain compile commands for source files that are exported by filegroup() to be built as part of another target, or are built with an external build system wrapped in a genrule(). + Labels provide a convenient way to link the non-compilable target with a rule that produces a compilation database for its sources: + ``` + load("@prelude//cxx:compilation_database_labels.bzl", "compilation_database_rules") + + # The shorthand way for most cases: + export_file( + name = "gadget_to_be_compiled_as_part_of_another_target.cpp", + labels = compilation_database_rules([ + "//path/to/some/dependent:target", + "//path/to/another/dependent:target", + ]) + ) + + # A per-source mapping for cases when the generated files from one genrule() are compiled in different targets and never together: + genrule( + name = "multiple_gadgets_for_different_purposes", + labels = compilation_database_rules({ + "server_gen.cpp": ["//path/to/dependent/module:server"], + "client_gen.cpp": ["//path/to/dependent/module:client"], + }) + ) + ``` + The tooling can use a BXL script to check the target kind and extract the compilation database rule from its labels, and then iterate over the resulting compilation database and resolve the symlinks in 'file' entries in order to find the matching entry for the original source. + + :param dict[str,list[str]]|list[str] source_mapping: A mapping with a source file name regex as key and a list of targets as value. Each target has to be either a target with a [compilation-database] subtarget, or a genrule that produces compile_commands.json (for wrapping external build systems). + """ + if not isinstance(source_mapping, dict): + source_mapping = {".*": source_mapping} + return ["compilation_database_rules=" + json.encode(source_mapping)] + +def get_compilation_database_rules(labels: list[str]) -> dict[str, list[str]] | None: + """ + Retrieves and decodes compilation database targets from target labels, if any. 
+ """ + for label in labels: + value = label.removeprefix("compilation_database_rules=") + if value != label: + return json.decode(value) + return None diff --git a/prelude/cxx/compile.bzl b/prelude/cxx/compile.bzl index 785c93b4f2d..d5889e4689e 100644 --- a/prelude/cxx/compile.bzl +++ b/prelude/cxx/compile.bzl @@ -9,7 +9,6 @@ load("@prelude//:paths.bzl", "paths") load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo") load("@prelude//cxx:cxx_utility.bzl", "cxx_attrs_get_allow_cache_upload") load("@prelude//linking:lto.bzl", "LtoMode") -load("@prelude//utils:set.bzl", "set") load( "@prelude//utils:utils.bzl", "flatten", @@ -25,10 +24,13 @@ load( "get_pic_flags", ) load(":cxx_context.bzl", "get_cxx_toolchain_info") +load(":cxx_sources.bzl", "CxxSrcWithFlags") load(":cxx_toolchain_types.bzl", "CxxObjectFormat", "DepTrackingMode") +load(":cxx_types.bzl", "CxxRuleConstructorParams") load(":debug.bzl", "SplitDebugMode") load( ":headers.bzl", + "CHeader", "CPrecompiledHeaderInfo", ) load(":platform.bzl", "cxx_by_platform") @@ -36,11 +38,20 @@ load( ":preprocessor.bzl", "CPreprocessor", # @unused Used as a type "CPreprocessorInfo", # @unused Used as a type - "cxx_attr_preprocessor_flags", + "HeaderUnit", # @unused Used as a type "cxx_merge_cpreprocessors", "get_flags_for_compiler_type", ) +# Supported assembly extensions +AsmExtensions = enum( + ".s", + ".sx", + ".S", + ".asm", + ".asmpp", +) + # Supported Cxx file extensions CxxExtension = enum( ".cpp", @@ -48,20 +59,17 @@ CxxExtension = enum( ".cxx", ".c++", ".c", - ".s", - ".sx", - ".S", ".m", ".mm", ".cu", ".hip", - ".asm", - ".asmpp", ".h", ".hpp", ".hh", ".h++", ".hxx", + ".bc", + *AsmExtensions.values() ) # Header files included in compilation databases @@ -102,6 +110,12 @@ _CxxCompileCommand = record( base_compile_cmd = field(cmd_args), # The argsfile of arguments from the rule and it's dependencies. argsfile = field(CompileArgsfile), + # The argsfile to use for Xcode integration. + xcode_argsfile = field(CompileArgsfile), + # The argsfile containing exported header units args (for precompilation). + header_units_argsfile = field(CompileArgsfile | None), + # The argsfile containing all header units args (for actual compilation). + private_header_units_argsfile = field(CompileArgsfile | None), headers_dep_files = field([_HeadersDepFiles, None]), compiler_type = field(str), # The action category @@ -122,29 +136,35 @@ CxxSrcCompileCommand = record( args = field(list[typing.Any]), # Is this a header file? is_header = field(bool, False), + # The index store factory to use to generate index store for this source file. + index_store_factory = field(typing.Callable | None, None), + error_handler = field([typing.Callable, None], None), +) + +_CxxSrcPrecompileCommand = record( + # Source file to compile. + src = field(Artifact), + # The CxxCompileCommand to use to compile this file. + cxx_compile_cmd = field(_CxxCompileCommand), + # Arguments specific to the source file. + args = field(list[typing.Any]), + # Extra argsfile to include after any other header units argsfile but before the + # main argsfiles. + extra_argsfile = field([CompileArgsfile, None], None), ) # Output of creating compile commands for Cxx source files. CxxCompileCommandOutput = record( # List of compile commands for each source file. src_compile_cmds = field(list[CxxSrcCompileCommand], default = []), + # Base compile commands for each source file extension. 
+ base_compile_cmds = field(dict[CxxExtension, _CxxCompileCommand], default = {}), # Argsfiles generated for compiling these source files. argsfiles = field(CompileArgsfiles, default = CompileArgsfiles()), # List of compile commands for use in compilation database generation. comp_db_compile_cmds = field(list[CxxSrcCompileCommand], default = []), ) -# An input to cxx compilation, consisting of a file to compile and optional -# file specific flags to compile with. -CxxSrcWithFlags = record( - file = field(Artifact), - flags = field(list[ResolvedStringWithMacros], []), - # If we have multiple source entries with same files but different flags, - # specify an index so we can differentiate them. Otherwise, use None. - index = field([int, None], None), - is_header = field(bool, False), -) - CxxCompileOutput = record( # The compiled `.o` file. object = field(Artifact), @@ -153,12 +173,27 @@ CxxCompileOutput = record( # Externally referenced debug info, which doesn't get linked with the # object (e.g. the above `.o` when using `-gsplit-dwarf=single` or the # the `.dwo` when using `-gsplit-dwarf=split`). - external_debug_info = field([Artifact, None], None), - clang_remarks = field([Artifact, None], None), - clang_trace = field([Artifact, None], None), + external_debug_info = field(Artifact | None, None), + clang_remarks = field(Artifact | None, None), + clang_trace = field(Artifact | None, None), + gcno_file = field(Artifact | None, None), + index_store = field(Artifact | None, None), + assembly = field(Artifact | None, None), + diagnostics = field(Artifact | None, None), + preproc = field(Artifact | None, None), ) -_ABSOLUTE_ARGSFILE_SUBSTITUTIONS = [ +CxxCompileFlavor = enum( + # Default compilation without alterations + "default", + # Produces position independent compile outputs + "pic", + # Produces position independent compile outputs + # using optimization flags from toolchain + "pic_optimized", +) + +_XCODE_ARG_SUBSTITUTION = [ (regex("-filter-error=.+"), "-fcolor-diagnostics"), (regex("-filter-ignore=.+"), "-fcolor-diagnostics"), (regex("-filter-warning=.+"), "-fcolor-diagnostics"), @@ -166,64 +201,92 @@ _ABSOLUTE_ARGSFILE_SUBSTITUTIONS = [ # @oss-disable: (regex("-fpika-runtime-checks"), "-fcolor-diagnostics"), ] -def get_extension_for_header(header_extension: str) -> str | None: +def get_source_extension_for_header(header_extension: str, default: CxxExtension) -> CxxExtension: """ Which source file extension to use to get compiler flags for the header. """ if header_extension in (".hpp", ".hh", ".h++", ".hxx"): - return ".cpp" + return CxxExtension(".cpp") elif header_extension == ".cuh": - return ".cu" + return CxxExtension(".cu") elif header_extension not in HeaderExtension.values(): - return header_extension # a file in `headers` has a source extension + return CxxExtension(header_extension) # a file in `headers` has a source extension else: - return None + return default -def get_extension_for_plain_headers(srcs: list[CxxSrcWithFlags]) -> str | None: +def get_source_extension(src: CxxSrcWithFlags, default_for_headers: CxxExtension) -> CxxExtension: """ - For a given list source files determine which source file extension - to use to get compiler flags for plain .h headers. + Which source file extension to use for a source or a header file. We want + headers to appear as though they are source files. 
""" + if src.is_header: + return get_source_extension_for_header(src.file.extension, default_for_headers) + else: + return CxxExtension(src.file.extension) + +def collect_extensions(srcs: list[CxxSrcWithFlags]) -> set[CxxExtension]: + """ + Collect extensions of source files while doing light normalization. + """ + duplicates = { ".c++": ".cpp", ".cc": ".cpp", ".cxx": ".cpp", } - extensions = set([duplicates.get(src.file.extension, src.file.extension) for src in srcs]) - - # Assembly doesn't need any special handling as included files tend to have .asm extension themselves. - # And the presence of assembly in the target doesn't tell us anything about the language of .h files. - for asm_ext in [".s", ".S", ".asm", ".asmpp"]: - extensions.remove(asm_ext) + extensions = set([CxxExtension(duplicates.get(src.file.extension, src.file.extension)) for src in srcs]) + return extensions - if extensions.size() == 0: - return None - if extensions.size() == 1: - return extensions.list()[0] - if extensions.contains(".hip"): - return ".hip" - if extensions.contains(".cu"): - return ".cu" - if extensions.contains(".mm"): - return ".mm" - if extensions.contains(".cpp") and extensions.contains(".m"): - return ".mm" - if extensions.contains(".cpp"): - return ".cpp" - if extensions.contains(".m"): - return ".m" - return ".c" - -def get_default_extension_for_plain_header(rule_type: str) -> str: +def default_source_extension_for_plain_header(rule_type: str) -> CxxExtension: """ Returns default source file extension to use to get get compiler flags for plain .h headers. """ # Default to (Objective-)C++ instead of plain (Objective-)C as it is more likely to be compatible with both. - return ".mm" if rule_type.startswith("apple_") else ".cpp" + return CxxExtension(".mm") if rule_type.startswith("apple_") else CxxExtension(".cpp") + +def detect_source_extension_for_plain_headers(exts: set[CxxExtension], rule_type: str) -> CxxExtension: + """ + For a given list source files determine which source file extension + to use to get compiler flags for plain .h headers. + """ -def get_header_language_mode(source_extension: str) -> str | None: + # Assembly doesn't need any special handling as included files tend to have .asm extension themselves. + # And the presence of assembly in the target doesn't tell us anything about the language of .h files. + for asm_ext in AsmExtensions: + exts.discard(asm_ext) + + if len(exts) == 0: + return default_source_extension_for_plain_header(rule_type) + + if len(exts) == 1: + return exts.pop() + if CxxExtension(".hip") in exts: + return CxxExtension(".hip") + if CxxExtension(".cu") in exts: + return CxxExtension(".cu") + if CxxExtension(".mm") in exts: + return CxxExtension(".mm") + if CxxExtension(".cpp") in exts and CxxExtension(".m") in exts: + return CxxExtension(".mm") + if CxxExtension(".cpp") in exts: + return CxxExtension(".cpp") + if CxxExtension(".m") in exts: + return CxxExtension(".m") + return CxxExtension(".c") + +def collect_source_extensions( + srcs: list[CxxSrcWithFlags], + default_for_headers: CxxExtension) -> set[CxxExtension]: + """ + Return unique source extensions from a list of source and header files where + header extensions are mapped to corresponding source extensions. 
+ """ + source_extensions = set([get_source_extension(src, default_for_headers) for src in srcs]) + return source_extensions + +def get_header_language_mode(source_extension: CxxExtension) -> str | None: """ Returns the header mode to use for plain .h headers based on the source file extension used to obtain the compiler flags for them. @@ -231,26 +294,28 @@ def get_header_language_mode(source_extension: str) -> str | None: # Note: CUDA doesn't have its own header language mode, but the headers have distinct .cuh extension. modes = { - ".cpp": "c++-header", - ".m": "objective-c-header", - ".mm": "objective-c++-header", + CxxExtension(".cpp"): "c++-header", + CxxExtension(".m"): "objective-c-header", + CxxExtension(".mm"): "objective-c++-header", } return modes.get(source_extension) def create_compile_cmds( ctx: AnalysisContext, - # TODO(nga): this is `CxxRuleConstructorParams`, - # but there's dependency cycle between `compile.bzl` (this file) - # and `cxx_types.bzl` (where `CxxRuleConstructorParams` is defined). - impl_params: typing.Any, + impl_params: CxxRuleConstructorParams, own_preprocessors: list[CPreprocessor], - inherited_preprocessor_infos: list[CPreprocessorInfo]) -> CxxCompileCommandOutput: + inherited_preprocessor_infos: list[CPreprocessorInfo], + add_coverage_instrumentation_compiler_flags: bool, + header_preprocessor_info: CPreprocessorInfo = CPreprocessorInfo()) -> CxxCompileCommandOutput: """ Forms the CxxSrcCompileCommand to use for each source file based on it's extension and optional source file flags. Returns CxxCompileCommandOutput containing an array of the generated compile commands and argsfile output. """ + srcs_extensions = collect_extensions(impl_params.srcs) + extension_for_plain_headers = detect_source_extension_for_plain_headers(srcs_extensions, impl_params.rule_type) + srcs_with_flags = [] # type: [CxxSrcWithFlags] for src in impl_params.srcs: @@ -285,87 +350,324 @@ def create_compile_cmds( ) headers_tag = ctx.actions.artifact_tag() - abs_headers_tag = ctx.actions.artifact_tag() # This headers tag is just for convenience use in _mk_argsfile and is otherwise unused. src_compile_cmds = [] hdr_compile_cmds = [] - cxx_compile_cmd_by_ext = {} - argsfile_by_ext = {} - abs_argsfile_by_ext = {} + cxx_compile_cmd_by_ext = {} # type: dict[CxxExtension, _CxxCompileCommand] + argsfile_by_ext = {} # type: dict[str, CompileArgsfile] + xcode_argsfile_by_ext = {} # type: dict[str, CompileArgsfile] + + src_extensions = collect_source_extensions(srcs_with_flags, extension_for_plain_headers) + + # Deduplicate shared arguments to save memory. If we compile multiple files + # of the same extension they will have some of the same flags. Save on + # allocations by caching and reusing these objects. + for ext in src_extensions: + cmd = _generate_base_compile_command(ctx, impl_params, pre, header_preprocessor_info, headers_tag, ext) + cxx_compile_cmd_by_ext[ext] = cmd + argsfile_by_ext[ext.value] = cmd.argsfile + xcode_argsfile_by_ext[ext.value] = cmd.xcode_argsfile + + # only specify error_handler if one exists + error_handler_args = {} + if impl_params.error_handler: + error_handler_args["error_handler"] = impl_params.error_handler - extension_for_plain_headers = get_extension_for_plain_headers(impl_params.srcs) - extension_for_plain_headers = extension_for_plain_headers or get_default_extension_for_plain_header(impl_params.rule_type) for src in srcs_with_flags: - # We want headers to appear as though they are source files. 
- extension_for_header = get_extension_for_header(src.file.extension) or extension_for_plain_headers - ext = CxxExtension(extension_for_header if src.is_header else src.file.extension) - - # Deduplicate shared arguments to save memory. If we compile multiple files - # of the same extension they will have some of the same flags. Save on - # allocations by caching and reusing these objects. - if not ext in cxx_compile_cmd_by_ext: - toolchain = get_cxx_toolchain_info(ctx) - compiler_info = _get_compiler_info(toolchain, ext) - base_compile_cmd = _get_compile_base(compiler_info) - category = _get_category(ext) - - headers_dep_files = None - dep_file_file_type_hint = _dep_file_type(ext) - if dep_file_file_type_hint != None and toolchain.use_dep_files: - tracking_mode = _get_dep_tracking_mode(toolchain, dep_file_file_type_hint) - mk_dep_files_flags = get_headers_dep_files_flags_factory(tracking_mode) - if mk_dep_files_flags: - headers_dep_files = _HeadersDepFiles( - processor = cmd_args(compiler_info.dep_files_processor), - mk_flags = mk_dep_files_flags, - tag = headers_tag, - dep_tracking_mode = tracking_mode, - ) - - argsfile_by_ext[ext.value] = _mk_argsfile(ctx, compiler_info, pre, ext, headers_tag, False) - abs_argsfile_by_ext[ext.value] = _mk_argsfile(ctx, compiler_info, pre, ext, abs_headers_tag, True) - - allow_cache_upload = cxx_attrs_get_allow_cache_upload(ctx.attrs, default = compiler_info.allow_cache_upload) - cxx_compile_cmd_by_ext[ext] = _CxxCompileCommand( - base_compile_cmd = base_compile_cmd, - argsfile = argsfile_by_ext[ext.value], - headers_dep_files = headers_dep_files, - compiler_type = compiler_info.compiler_type, - category = category, - allow_cache_upload = allow_cache_upload, - ) + src_args = [] + src_args.extend(src.flags) + ext = get_source_extension(src, extension_for_plain_headers) cxx_compile_cmd = cxx_compile_cmd_by_ext[ext] - src_args = [] - src_args.extend(src.flags) + if add_coverage_instrumentation_compiler_flags and cxx_compile_cmd.compiler_type != "gcc": + src_args.extend(ctx.attrs.coverage_instrumentation_compiler_flags) + if src.is_header: - language_mode = get_header_language_mode(extension_for_header) - src_args.extend(["-x", language_mode] if language_mode else []) - src_args.extend(["-c", src.file]) + if cxx_compile_cmd.compiler_type in ["clang", "clang_windows", "gcc"]: + language_mode = get_header_language_mode(ext) + src_args.extend(["-x", language_mode] if language_mode else []) + elif cxx_compile_cmd.compiler_type in ["clang_cl", "windows", "windows_ml64"] and ext == CxxExtension(".cpp"): + src_args.append("/TP") + + if cxx_compile_cmd.compiler_type != "nasm": + src_args.append("-c") + src_args.append(src.file) - src_compile_command = CxxSrcCompileCommand(src = src.file, cxx_compile_cmd = cxx_compile_cmd, args = src_args, index = src.index, is_header = src.is_header) + src_compile_command = CxxSrcCompileCommand(src = src.file, cxx_compile_cmd = cxx_compile_cmd, args = src_args, index = src.index, is_header = src.is_header, index_store_factory = impl_params.index_store_factory, **error_handler_args) if src.is_header: hdr_compile_cmds.append(src_compile_command) else: src_compile_cmds.append(src_compile_command) argsfile_by_ext.update(impl_params.additional.argsfiles.relative) - abs_argsfile_by_ext.update(impl_params.additional.argsfiles.absolute) + xcode_argsfile_by_ext.update(impl_params.additional.argsfiles.xcode) return CxxCompileCommandOutput( src_compile_cmds = src_compile_cmds, + base_compile_cmds = cxx_compile_cmd_by_ext, argsfiles = 
CompileArgsfiles( relative = argsfile_by_ext, - absolute = abs_argsfile_by_ext, + xcode = xcode_argsfile_by_ext, ), comp_db_compile_cmds = src_compile_cmds + hdr_compile_cmds, ) +def _compile_index_store(ctx: AnalysisContext, src_compile_cmd: CxxSrcCompileCommand, toolchain: CxxToolchainInfo, compile_cmd: cmd_args, pic: bool) -> Artifact | None: + if src_compile_cmd.index_store_factory: + return src_compile_cmd.index_store_factory(ctx, src_compile_cmd, toolchain, compile_cmd, pic) + return None + +def _compile_single_cxx( + ctx: AnalysisContext, + toolchain: CxxToolchainInfo, + default_object_format: CxxObjectFormat, + bitcode_args: cmd_args, + optimization_flags: list, + src_compile_cmd: CxxSrcCompileCommand, + pic: bool, + provide_syntax_only: bool, + use_header_units: bool) -> CxxCompileOutput: + """ + Construct a final compile command for a single CXX source based on + `src_compile_command` and other compilation options. + """ + + short_path = src_compile_cmd.src.short_path + if src_compile_cmd.index != None: + # Add a unique postfix if we have duplicate source files with different flags + short_path = short_path + "_" + str(src_compile_cmd.index) + + filename_base = short_path + (".pic" if pic else "") + identifier = short_path + (" (pic)" if pic else "") + + if optimization_flags: + identifier += " (optimized) " + + filename_base = filename_base + (".optimized" if optimization_flags else "") + object = ctx.actions.declare_output( + "__objects__", + "{}.{}".format(filename_base, toolchain.linker_info.object_file_extension), + ) + + compiler_type = src_compile_cmd.cxx_compile_cmd.compiler_type + cmd = _get_base_compile_cmd( + bitcode_args = bitcode_args, + src_compile_cmd = src_compile_cmd, + pic = pic, + use_header_units = use_header_units, + output_args = cmd_args(get_output_flags(compiler_type, object)), + ) + cmd.add(cmd_args(optimization_flags)) + + action_dep_files = {} + + headers_dep_files = src_compile_cmd.cxx_compile_cmd.headers_dep_files + if headers_dep_files: + dep_file = ctx.actions.declare_output( + paths.join("__dep_files__", filename_base), + ).as_output() + + processor_flags, compiler_flags = headers_dep_files.mk_flags(ctx.actions, filename_base, src_compile_cmd.src) + cmd.add(compiler_flags) + + # API: First argument is the dep file source path, second is the + # dep file destination path, other arguments are the actual compile + # command. 
+ cmd = cmd_args([ + headers_dep_files.processor, + headers_dep_files.dep_tracking_mode.value, + processor_flags, + headers_dep_files.tag.tag_artifacts(dep_file), + cmd, + ]) + + action_dep_files["headers"] = headers_dep_files.tag + + clang_remarks = None + if toolchain.clang_remarks and compiler_type == "clang": + cmd.add(["-fsave-optimization-record", "-fdiagnostics-show-hotness", "-foptimization-record-passes=" + toolchain.clang_remarks]) + clang_remarks = ctx.actions.declare_output( + paths.join("__objects__", "{}.opt.yaml".format(filename_base)), + ) + cmd.add(cmd_args(hidden = clang_remarks.as_output())) + + clang_trace = None + if toolchain.clang_trace and compiler_type == "clang": + cmd.add(["-ftime-trace"]) + clang_trace = ctx.actions.declare_output( + paths.join("__objects__", "{}.json".format(filename_base)), + ) + cmd.add(cmd_args(hidden = clang_trace.as_output())) + + gcno_file = None + if toolchain.gcno_files and src_compile_cmd.src.extension not in (".S", ".sx"): + cmd.add(["--coverage"]) + gcno_file = ctx.actions.declare_output( + paths.join("__objects__", "{}.gcno".format(filename_base)), + ) + cmd.add(cmd_args(hidden = gcno_file.as_output())) + + # only specify error_handler if one exists + error_handler_args = {} + if src_compile_cmd.error_handler: + error_handler_args["error_handler"] = src_compile_cmd.error_handler + + ctx.actions.run( + cmd, + category = src_compile_cmd.cxx_compile_cmd.category, + identifier = identifier, + dep_files = action_dep_files, + allow_cache_upload = src_compile_cmd.cxx_compile_cmd.allow_cache_upload, + allow_dep_file_cache_upload = False, + **error_handler_args + ) + + # If we're building with split debugging, where the debug info is in the + # original object, then add the object as external debug info + # FIXME: ThinLTO generates debug info in a separate dwo dir, but we still + # need to track object files if the object file is not compiled to bitcode. + # We should track whether ThinLTO is used on a per-object basis rather than + # globally on a toolchain level. + object_has_external_debug_info = ( + toolchain.split_debug_mode == SplitDebugMode("single") + ) + + # .S extension is native assembly code (machine level, processor specific) + # and clang will happily compile them to .o files, but the objects are always + # native even if we ask for bitcode. If we don't mark the output format, + # other tools would try to parse the .o file as LLVM-IR and fail. 
+ if src_compile_cmd.src.extension in [".S", ".s"]: + object_format = CxxObjectFormat("native") + else: + object_format = default_object_format + + compile_index_store_cmd = _get_base_compile_cmd( + bitcode_args = bitcode_args, + src_compile_cmd = src_compile_cmd, + pic = pic, + ) + index_store = _compile_index_store(ctx, src_compile_cmd, toolchain, compile_index_store_cmd, pic) + + # Generate asm for compilers which accept `-S` (TODO: support others) + if compiler_type in ["clang", "gcc"]: + # Generate assembler or llvm bitcode output file + assembly_extension = "s" + if compiler_type == "clang" and object_format == CxxObjectFormat("bitcode"): + assembly_extension = "ll" + assembly = ctx.actions.declare_output( + "__assembly__", + "{}.{}".format(filename_base, assembly_extension), + ) + assembly_cmd = _get_base_compile_cmd( + bitcode_args = bitcode_args, + src_compile_cmd = src_compile_cmd, + pic = pic, + output_args = cmd_args("-S", get_output_flags(compiler_type, assembly)), + ) + ctx.actions.run( + assembly_cmd, + category = src_compile_cmd.cxx_compile_cmd.category, + identifier = identifier + " (assembly)", + allow_cache_upload = src_compile_cmd.cxx_compile_cmd.allow_cache_upload, + allow_dep_file_cache_upload = False, + **error_handler_args + ) + else: + assembly = None + + if compiler_type == "clang" and provide_syntax_only: + diagnostics = ctx.actions.declare_output( + "__diagnostics__", + "{}.diag.txt".format(short_path), + ) + syntax_only_cmd = _get_base_compile_cmd( + bitcode_args = bitcode_args, + src_compile_cmd = src_compile_cmd, + pic = pic, + output_args = cmd_args("-fsyntax-only"), + ) + ctx.actions.run( + [ + toolchain.internal_tools.stderr_to_file, + cmd_args(diagnostics.as_output(), format = "--out={}"), + syntax_only_cmd, + ], + category = "check", + identifier = short_path, + allow_cache_upload = src_compile_cmd.cxx_compile_cmd.allow_cache_upload, + allow_dep_file_cache_upload = False, + **error_handler_args + ) + else: + diagnostics = None + + # Generate pre-processed sources + preproc = ctx.actions.declare_output( + "__preprocessed__", + "{}.{}".format(filename_base, "i"), + ) + preproc_cmd = _get_base_compile_cmd(bitcode_args, src_compile_cmd, pic, cmd_args("-E", "-dD", get_output_flags(compiler_type, preproc))) + ctx.actions.run( + preproc_cmd, + category = src_compile_cmd.cxx_compile_cmd.category, + identifier = identifier + " (preprocessor)", + allow_cache_upload = src_compile_cmd.cxx_compile_cmd.allow_cache_upload, + allow_dep_file_cache_upload = False, + **error_handler_args + ) + + return CxxCompileOutput( + object = object, + object_format = object_format, + object_has_external_debug_info = object_has_external_debug_info, + clang_remarks = clang_remarks, + clang_trace = clang_trace, + gcno_file = gcno_file, + index_store = index_store, + assembly = assembly, + diagnostics = diagnostics, + preproc = preproc, + ) + +def _get_base_compile_cmd( + bitcode_args: cmd_args, + src_compile_cmd: CxxSrcCompileCommand, + pic: bool, + output_args: cmd_args | None = None, + use_header_units: bool = False) -> cmd_args: + """ + Construct a shared compile command for a single CXX source based on + `src_compile_command` and other compilation options. 
+ """ + cmd = cmd_args(src_compile_cmd.cxx_compile_cmd.base_compile_cmd) + if output_args: + cmd.add(output_args) + + compiler_type = src_compile_cmd.cxx_compile_cmd.compiler_type + + args = cmd_args() + + if pic: + args.add(get_pic_flags(compiler_type)) + + if use_header_units and src_compile_cmd.cxx_compile_cmd.private_header_units_argsfile: + args.add(src_compile_cmd.cxx_compile_cmd.private_header_units_argsfile.cmd_form) + + args.add(src_compile_cmd.cxx_compile_cmd.argsfile.cmd_form) + args.add(src_compile_cmd.args) + + cmd.add(args) + cmd.add(bitcode_args) + + return cmd + def compile_cxx( ctx: AnalysisContext, src_compile_cmds: list[CxxSrcCompileCommand], - pic: bool = False) -> list[CxxCompileOutput]: + flavor: CxxCompileFlavor, + provide_syntax_only: bool, + use_header_units: bool = False) -> list[CxxCompileOutput]: """ For a given list of src_compile_cmds, generate output artifacts. """ @@ -391,112 +693,294 @@ def compile_cxx( objects = [] for src_compile_cmd in src_compile_cmds: - identifier = src_compile_cmd.src.short_path - if src_compile_cmd.index != None: - # Add a unique postfix if we have duplicate source files with different flags - identifier = identifier + "_" + str(src_compile_cmd.index) - - filename_base = identifier + (".pic" if pic else "") - object = ctx.actions.declare_output( - "__objects__", - "{}.{}".format(filename_base, linker_info.object_file_extension), + cxx_compile_output = _compile_single_cxx( + ctx = ctx, + toolchain = toolchain, + default_object_format = default_object_format, + bitcode_args = bitcode_args, + optimization_flags = toolchain.optimization_compiler_flags_EXPERIMENTAL if flavor == CxxCompileFlavor("pic_optimized") else [], + src_compile_cmd = src_compile_cmd, + pic = flavor != CxxCompileFlavor("default"), + provide_syntax_only = provide_syntax_only, + use_header_units = use_header_units, ) + objects.append(cxx_compile_output) - cmd = cmd_args(src_compile_cmd.cxx_compile_cmd.base_compile_cmd) - - compiler_type = src_compile_cmd.cxx_compile_cmd.compiler_type - cmd.add(get_output_flags(compiler_type, object)) - - args = cmd_args() - - if pic: - args.add(get_pic_flags(compiler_type)) - - args.add(src_compile_cmd.cxx_compile_cmd.argsfile.cmd_form) - args.add(src_compile_cmd.args) - - cmd.add(args) - cmd.add(bitcode_args) - - action_dep_files = {} - - headers_dep_files = src_compile_cmd.cxx_compile_cmd.headers_dep_files - if headers_dep_files: - dep_file = ctx.actions.declare_output( - paths.join("__dep_files__", filename_base), - ).as_output() - - processor_flags, compiler_flags = headers_dep_files.mk_flags(ctx.actions, filename_base, src_compile_cmd.src) - cmd.add(compiler_flags) - - # API: First argument is the dep file source path, second is the - # dep file destination path, other arguments are the actual compile - # command. 
- cmd = cmd_args([ - headers_dep_files.processor, - headers_dep_files.dep_tracking_mode.value, - processor_flags, - headers_dep_files.tag.tag_artifacts(dep_file), - cmd, - ]) - - action_dep_files["headers"] = headers_dep_files.tag + return objects - if pic: - identifier += " (pic)" +def _compiler_supports_header_units(compiler_info: typing.Any): + return (compiler_info.compiler_type == "clang" and + compiler_info.supports_two_phase_compilation) + +def _get_module_name(ctx: AnalysisContext, group_name: str) -> str: + return paths.normalize(paths.join( + "__header_units__", + ctx.label.package, + "{}{}.h".format(ctx.label.name, group_name), + )) + +def _get_import_filename(ctx: AnalysisContext, group_name: str) -> str: + return paths.normalize(paths.join( + ctx.label.package, + "__import__{}{}.h".format(ctx.label.name, group_name), + )) + +def _is_standalone_header(header: CHeader) -> bool: + if header.artifact.extension not in HeaderExtension.values(): + return False + if header.name.endswith("-inl.h"): + return False + if header.name.endswith(".tcc"): + return False + if header.name.endswith("-pre.h"): + return False + if header.name.endswith("-post.h"): + return False + return True + +def _convert_raw_header( + ctx: AnalysisContext, + raw_header: Artifact, + include_dirs: list[CellPath]) -> CHeader: + package_prefix = str(ctx.label.path) + ns = paths.dirname(raw_header.short_path) + for d in include_dirs: + abs_dir = str(d) + if paths.starts_with(abs_dir, package_prefix): + prefix = paths.relativize(abs_dir, package_prefix) + if paths.starts_with(ns, prefix): + ns = paths.relativize(ns, prefix) + break + return CHeader( + artifact = raw_header, + name = raw_header.basename, + namespace = ns, + named = False, + ) - clang_remarks = None - if toolchain.clang_remarks and compiler_type == "clang": - args.add(["-fsave-optimization-record", "-fdiagnostics-show-hotness", "-foptimization-record-passes=" + toolchain.clang_remarks]) - clang_remarks = ctx.actions.declare_output( - paths.join("__objects__", "{}.opt.yaml".format(filename_base)), - ) - cmd.hidden(clang_remarks.as_output()) +def _create_precompile_cmd( + ctx: AnalysisContext, + compiler_info: typing.Any, + preprocessors: list[CPreprocessor], + header_group: str | None, + group_name: str, + extra_preprocessors: list[CPreprocessor], + cmd: _CxxCompileCommand) -> _CxxSrcPrecompileCommand: + include_dirs = flatten([x.include_dirs for x in preprocessors]) + converted_headers = [ + _convert_raw_header(ctx, raw_header, include_dirs) + for raw_header in flatten([x.raw_headers for x in preprocessors]) + ] + headers = [ + header + for header in flatten([x.headers for x in preprocessors]) + converted_headers + if (_is_standalone_header(header) if header_group == None else regex_match(header_group, header.name)) + ] + + module_name = _get_module_name(ctx, group_name) + import_name = _get_import_filename(ctx, group_name) + input_header = ctx.actions.write(module_name, "") + + import_stub = ctx.actions.write( + import_name, + """ +#ifdef FACEBOOK_CPP_HEADER_UNIT +export +#endif +import \"{}\"; +""".format(module_name), + ) - clang_trace = None - if toolchain.clang_trace and compiler_type == "clang": - args.add(["-ftime-trace"]) - clang_trace = ctx.actions.declare_output( - paths.join("__objects__", "{}.json".format(filename_base)), - ) - cmd.hidden(clang_trace.as_output()) + modulemap_headers = [] + symlinked_files = {} + for header in headers: + path = paths.normalize(paths.join(header.namespace, header.name)) + symlinked_files[path] = import_stub + 
modulemap_headers.append(path) + + modulemap_content = """ +module "{}" {{ + header "{}" + export * +}} +""".format(module_name, module_name) + modulemap_file = ctx.actions.write("module.modulemap" + group_name, modulemap_content) + + src_dir = ctx.actions.symlinked_dir( + "header-unit" + group_name, + symlinked_files | { + module_name: input_header, + import_name: import_stub, + "module.modulemap": modulemap_file, + }, + ) - ctx.actions.run( - cmd, - category = src_compile_cmd.cxx_compile_cmd.category, - identifier = identifier, - dep_files = action_dep_files, - allow_cache_upload = src_compile_cmd.cxx_compile_cmd.allow_cache_upload, + args = [] + args.extend([ + "-DFACEBOOK_CPP_HEADER_UNIT=1", + # TODO(nml): Fix warning bugs. + "-Wno-uninitialized", + "-Wno-conversion", + "-Wno-zero-as-null-pointer-constant", + "-Wno-c++98-compat-extra-semi", + ]) + + extra_argsfile = None + if extra_preprocessors: + extra_argsfile = _mk_header_units_argsfile( + ctx = ctx, + compiler_info = compiler_info, + preprocessor = cxx_merge_cpreprocessors(ctx, extra_preprocessors, []), + name = "export" + group_name, + ext = CxxExtension(".cpp"), ) - # If we're building with split debugging, where the debug info is in the - # original object, then add the object as external debug info - # FIXME: ThinLTO generates debug info in a separate dwo dir, but we still - # need to track object files if the object file is not compiled to bitcode. - # We should track whether ThinLTO is used on a per-object basis rather than - # globally on a toolchain level. - object_has_external_debug_info = ( - toolchain.split_debug_mode == SplitDebugMode("single") - ) + for header in headers: + args.extend(["-include", paths.join(header.namespace, header.name)]) + args.extend(["-xc++-user-header", "-fmodule-header"]) + args.extend(["-fmodule-name={}".format(module_name)]) + args.extend(["-Xclang", cmd_args(input_header, format = "-fmodules-embed-file={}")]) + args.extend(["--precompile", input_header]) + + return _CxxSrcPrecompileCommand( + src = src_dir, + cxx_compile_cmd = cmd, + args = args, + extra_argsfile = extra_argsfile, + ) - # .S extension is native assembly code (machine level, processor specific) - # and clang will happily compile them to .o files, but the object are always - # native even if we ask for bitcode. If we don't mark the output format, - # other tools would try and parse the .o file as LLVM-IR and fail. 
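For orientation, what the header-unit helpers above generate for a hypothetical target //foo:bar with an empty group name (sketch):
```
# _get_module_name(ctx, "")      -> "__header_units__/foo/bar.h"  (module name and input header)
# _get_import_filename(ctx, "")  -> "foo/__import__bar.h"         (import stub)
# module.modulemap:
#   module "__header_units__/foo/bar.h" {
#       header "__header_units__/foo/bar.h"
#       export *
#   }
# Each matched header path is symlinked to the import stub, so a plain
# #include of that header resolves to `import "__header_units__/foo/bar.h";`
# when FACEBOOK_CPP_HEADER_UNIT is defined.
```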
- if src_compile_cmd.src.extension in [".S", ".s"]: - object_format = CxxObjectFormat("native") - else: - object_format = default_object_format +def _precompile_single_cxx( + ctx: AnalysisContext, + impl_params: CxxRuleConstructorParams, + group_name: str, + src_compile_cmd: _CxxSrcPrecompileCommand) -> HeaderUnit: + identifier = src_compile_cmd.src.short_path + + filename = "{}.pcm".format(identifier) + module = ctx.actions.declare_output("__pcm_files__", filename) + + cmd = cmd_args(src_compile_cmd.cxx_compile_cmd.base_compile_cmd) + if src_compile_cmd.cxx_compile_cmd.header_units_argsfile: + cmd.add(src_compile_cmd.cxx_compile_cmd.header_units_argsfile.cmd_form) + if src_compile_cmd.extra_argsfile: + cmd.add(src_compile_cmd.extra_argsfile.cmd_form) + cmd.add(src_compile_cmd.cxx_compile_cmd.argsfile.cmd_form) + cmd.add(src_compile_cmd.args) + cmd.add(["-o", module.as_output()]) + + action_dep_files = {} + headers_dep_files = src_compile_cmd.cxx_compile_cmd.headers_dep_files + if headers_dep_files: + dep_file = ctx.actions.declare_output( + paths.join("__dep_files__", identifier), + ).as_output() + + processor_flags, compiler_flags = headers_dep_files.mk_flags( + ctx.actions, + identifier, + src_compile_cmd.src, + ) + cmd.add(compiler_flags) + + # API: First argument is the dep file source path, second is the + # dep file destination path, other arguments are the actual compile + # command. + cmd = cmd_args([ + headers_dep_files.processor, + headers_dep_files.dep_tracking_mode.value, + processor_flags, + headers_dep_files.tag.tag_artifacts(dep_file), + cmd, + ]) + action_dep_files["headers"] = headers_dep_files.tag + + ctx.actions.run( + cmd, + category = "cxx_precompile", + identifier = identifier, + dep_files = action_dep_files, + allow_cache_upload = src_compile_cmd.cxx_compile_cmd.allow_cache_upload, + allow_dep_file_cache_upload = False, + ) - objects.append(CxxCompileOutput( - object = object, - object_format = object_format, - object_has_external_debug_info = object_has_external_debug_info, - clang_remarks = clang_remarks, - clang_trace = clang_trace, - )) + return HeaderUnit( + name = _get_module_name(ctx, group_name), + module = module, + include_dir = src_compile_cmd.src, + import_include = _get_import_filename(ctx, group_name) if impl_params.export_header_unit == "preload" else None, + ) - return objects +def precompile_cxx( + ctx: AnalysisContext, + impl_params: CxxRuleConstructorParams, + preprocessors: list[CPreprocessor], + compile_cmd_output: CxxCompileCommandOutput) -> list[CPreprocessor]: + """ + Produces header units for the target and returns a list of preprocessors enabling + them; depending on those preprocessors will allow the corresponding module to load. 
+ """ + toolchain = get_cxx_toolchain_info(ctx) + if not _compiler_supports_header_units(toolchain.cxx_compiler_info): + return [] + + ext = CxxExtension(".cpp") + if ext not in compile_cmd_output.base_compile_cmds: + return [] + cmd = compile_cmd_output.base_compile_cmds[ext] + + header_unit_preprocessors = [] + if len(impl_params.export_header_unit_filter) <= 1: + group = None + if impl_params.export_header_unit_filter: + group = impl_params.export_header_unit_filter[0] + precompile_cmd = _create_precompile_cmd( + ctx = ctx, + compiler_info = toolchain.cxx_compiler_info, + preprocessors = preprocessors, + header_group = group, + group_name = "", + extra_preprocessors = [], + cmd = cmd, + ) + header_unit = _precompile_single_cxx(ctx, impl_params, "", precompile_cmd) + header_unit_preprocessors.append(CPreprocessor(header_units = [header_unit])) + else: + # Chain preprocessors in order. + i = 0 + for header_group in impl_params.export_header_unit_filter: + name = ".{}".format(i) + precompile_cmd = _create_precompile_cmd( + ctx = ctx, + compiler_info = toolchain.cxx_compiler_info, + preprocessors = preprocessors, + header_group = header_group, + group_name = name, + extra_preprocessors = header_unit_preprocessors, + cmd = cmd, + ) + header_unit = _precompile_single_cxx(ctx, impl_params, name, precompile_cmd) + header_unit_preprocessors.append(CPreprocessor(header_units = [header_unit])) + i += 1 + + return header_unit_preprocessors + +def cxx_objects_sub_targets(outs: list[CxxCompileOutput]) -> dict[str, list[Provider]]: + objects_sub_targets = {} + for obj in outs: + sub_targets = {} + if obj.clang_trace: + sub_targets["clang-trace"] = [DefaultInfo(obj.clang_trace)] + if obj.clang_remarks: + sub_targets["clang-remarks"] = [DefaultInfo(obj.clang_remarks)] + if obj.assembly: + sub_targets["assembly"] = [DefaultInfo(obj.assembly)] + if obj.preproc: + sub_targets["preprocessed"] = [DefaultInfo(obj.preproc)] + objects_sub_targets[obj.object.short_path] = [DefaultInfo( + obj.object, + sub_targets = sub_targets, + )] + return objects_sub_targets def _validate_target_headers(ctx: AnalysisContext, preprocessor: list[CPreprocessor]): path_to_artifact = {} @@ -512,7 +996,7 @@ def _validate_target_headers(ctx: AnalysisContext, preprocessor: list[CPreproces def _get_compiler_info(toolchain: CxxToolchainInfo, ext: CxxExtension) -> typing.Any: compiler_info = None - if ext.value in (".cpp", ".cc", ".mm", ".cxx", ".c++", ".h", ".hpp", ".hh", ".h++", ".hxx"): + if ext.value in (".cpp", ".cc", ".mm", ".cxx", ".c++", ".h", ".hpp", ".hh", ".h++", ".hxx", ".bc"): compiler_info = toolchain.cxx_compiler_info elif ext.value in (".c", ".m"): compiler_info = toolchain.c_compiler_info @@ -548,18 +1032,21 @@ def _get_category(ext: CxxExtension) -> str: return "cuda_compile" elif ext.value == ".hip": return "hip_compile" + elif ext.value == ".bc": + return "bitcode_compile" else: # This should be unreachable as long as we handle all enum values fail("Unknown extension: " + ext.value) -def _get_compile_base(compiler_info: typing.Any) -> cmd_args: +def _get_compile_base(toolchain: CxxToolchainInfo, compiler_info: typing.Any) -> cmd_args: """ Given a compiler info returned by _get_compiler_info, form the base compile args. 
""" - cmd = cmd_args(compiler_info.compiler) - - return cmd + if toolchain.remap_cwd and compiler_info.compiler_type in ["clang", "clang_windows", "clang_cl"]: + return cmd_args(toolchain.internal_tools.remap_cwd, compiler_info.compiler) + else: + return cmd_args(compiler_info.compiler) def _dep_file_type(ext: CxxExtension) -> [DepFileType, None]: # Raw assembly doesn't make sense to capture dep files for. @@ -571,8 +1058,11 @@ def _dep_file_type(ext: CxxExtension) -> [DepFileType, None]: # TODO (T118797886): HipCompilerInfo doesn't have dep files processor. # Should it? return None + elif ext.value == ".bc": + # Bitcode doesn't have depfiles + return None - # Return the file type aswell + # Return the file type as well if ext.value in (".cpp", ".cc", ".mm", ".cxx", ".c++", ".h", ".hpp", ".hh", ".h++", ".hxx"): return DepFileType("cpp") elif ext.value in (".c", ".m"): @@ -585,91 +1075,186 @@ def _dep_file_type(ext: CxxExtension) -> [DepFileType, None]: # This should be unreachable as long as we handle all enum values fail("Unknown C++ extension: " + ext.value) -def _add_compiler_info_flags(ctx: AnalysisContext, compiler_info: typing.Any, ext: CxxExtension, cmd: cmd_args): - cmd.add(compiler_info.preprocessor_flags or []) - cmd.add(compiler_info.compiler_flags or []) - cmd.add(get_flags_for_reproducible_build(ctx, compiler_info.compiler_type)) +def _add_compiler_info_flags(ctx: AnalysisContext, compiler_info: typing.Any, ext: CxxExtension) -> list: + cmd = [] + cmd.append(compiler_info.preprocessor_flags or []) + cmd.append(compiler_info.compiler_flags or []) + cmd.append(get_flags_for_reproducible_build(ctx, compiler_info.compiler_type)) if ext.value not in (".asm", ".asmpp"): # Clang's asm compiler doesn't support colorful output, so we skip this there. - cmd.add(get_flags_for_colorful_output(compiler_info.compiler_type)) + cmd.append(get_flags_for_colorful_output(compiler_info.compiler_type)) + + return cmd def _mk_argsfile( ctx: AnalysisContext, + file_name: str, + args_list: list, + is_nasm: bool, + is_xcode_argsfile: bool) -> Artifact: + if is_xcode_argsfile: + replace_regex = [] + for re, sub in _XCODE_ARG_SUBSTITUTION: + replace_regex.append((re, sub)) + file_args = cmd_args(args_list, replace_regex = replace_regex) + else: + file_args = cmd_args(args_list) if is_nasm else cmd_args(args_list, quote = "shell") + argsfile, _ = ctx.actions.write(file_name, file_args, allow_args = True) + return argsfile + +def _mk_argsfiles( + ctx: AnalysisContext, + impl_params: CxxRuleConstructorParams, compiler_info: typing.Any, preprocessor: CPreprocessorInfo, ext: CxxExtension, headers_tag: ArtifactTag, - use_absolute_paths: bool) -> CompileArgsfile: + is_xcode_argsfile: bool) -> CompileArgsfile: """ Generate and return an {ext}.argsfile artifact and command args that utilize the argsfile. 
""" - args = cmd_args() + is_nasm = compiler_info.compiler_type == "nasm" + filename_prefix = "xcode_" if is_xcode_argsfile else "" - _add_compiler_info_flags(ctx, compiler_info, ext, args) + argsfiles = [] + args_list = [] - if use_absolute_paths: - args.add(preprocessor.set.project_as_args("abs_args")) - else: - args.add(headers_tag.tag_artifacts(preprocessor.set.project_as_args("args"))) + compiler_info_flags = _add_compiler_info_flags(ctx, compiler_info, ext) + compiler_info_filename = ext.value + ".{}toolchain_cxx_args".format(filename_prefix) + argsfiles.append(_mk_argsfile(ctx, compiler_info_filename, compiler_info_flags, is_nasm, is_xcode_argsfile)) + args_list.append(compiler_info_flags) + + deps_args = [] + deps_args.append(headers_tag.tag_artifacts(preprocessor.set.project_as_args("args"))) # Different preprocessors will contain whether to use modules, # and the modulemap to use, so we need to get the final outcome. if preprocessor.set.reduce("uses_modules"): - args.add(headers_tag.tag_artifacts(preprocessor.set.project_as_args("modular_args"))) + deps_args.append(headers_tag.tag_artifacts(preprocessor.set.project_as_args("modular_args"))) - args.add(cxx_attr_preprocessor_flags(ctx, ext.value)) - args.add(get_flags_for_compiler_type(compiler_info.compiler_type)) - args.add(_attr_compiler_flags(ctx, ext.value)) - args.add(headers_tag.tag_artifacts(preprocessor.set.project_as_args("include_dirs"))) + deps_argsfile_filename = ext.value + ".{}deps_cxx_args".format(filename_prefix) + argsfiles.append(_mk_argsfile(ctx, deps_argsfile_filename, deps_args, is_nasm, is_xcode_argsfile)) + args_list.extend(deps_args) + + target_args = [] + target_args.append(_preprocessor_flags(ctx, impl_params, ext.value)) + target_args.append(get_flags_for_compiler_type(compiler_info.compiler_type)) + target_args.append(_compiler_flags(ctx, impl_params, ext.value)) + target_args.append(headers_tag.tag_artifacts(preprocessor.set.project_as_args("include_dirs"))) # Workaround as that's not precompiled, but working just as prefix header. # Another thing is that it's clang specific, should be generalized. - if ctx.attrs.precompiled_header != None: - args.add(["-include", headers_tag.tag_artifacts(ctx.attrs.precompiled_header[CPrecompiledHeaderInfo].header)]) - if ctx.attrs.prefix_header != None: - args.add(["-include", headers_tag.tag_artifacts(ctx.attrs.prefix_header)]) + if hasattr(ctx.attrs, "precompiled_header") and ctx.attrs.precompiled_header != None: + target_args.append(["-include", headers_tag.tag_artifacts(ctx.attrs.precompiled_header[CPrecompiledHeaderInfo].header)]) + if hasattr(ctx.attrs, "prefix_header") and ctx.attrs.prefix_header != None: + target_args.append(["-include", headers_tag.tag_artifacts(ctx.attrs.prefix_header)]) + + target_argsfile_filename = ext.value + ".{}target_cxx_args".format(filename_prefix) + argsfiles.append(_mk_argsfile(ctx, target_argsfile_filename, target_args, is_nasm, is_xcode_argsfile)) + args_list.extend(target_args) # Create a copy of the args so that we can continue to modify it later. 
- args_without_file_prefix_args = cmd_args(args) + args_without_file_prefix_args = cmd_args(args_list) - # Put file_prefix_args in argsfile directly, make sure they do not appear when evaluating $(cxxppflags) + # Put file_prefix_args in argsfile, make sure they do not appear when evaluating $(cxxppflags) # to avoid "argument too long" errors - if use_absolute_paths: - args.add(cmd_args(preprocessor.set.project_as_args("abs_file_prefix_args"))) - - # HACK: Replace Xcode clang incompatible flags with compatible ones. - # TODO: Refactor this to be a true Xcode argsfile generating flow. - for re, sub in _ABSOLUTE_ARGSFILE_SUBSTITUTIONS: - args.replace_regex(re, sub) + file_prefix_args = headers_tag.tag_artifacts(cmd_args(preprocessor.set.project_as_args("file_prefix_args"))) + file_prefix_args_filename = ext.value + ".{}file_prefix_cxx_args".format(filename_prefix) + argsfiles.append(_mk_argsfile(ctx, file_prefix_args_filename, [file_prefix_args], is_nasm, is_xcode_argsfile)) + args_list.append(file_prefix_args) + + if is_xcode_argsfile: + replace_regex = [] + for re, sub in _XCODE_ARG_SUBSTITUTION: + replace_regex.append((re, sub)) + args = cmd_args(args_list, replace_regex = replace_regex) + file_args = cmd_args(argsfiles, format = "@{}") else: - args.add(headers_tag.tag_artifacts(cmd_args(preprocessor.set.project_as_args("file_prefix_args")))) + args = cmd_args(args_list) if is_nasm else cmd_args(args_list, quote = "shell") + file_args = cmd_args(argsfiles, format = "-@{}") if is_nasm else cmd_args(argsfiles, format = "@{}", quote = "shell") - shell_quoted_args = cmd_args(args, quote = "shell") + file_name = ext.value + ".{}cxx_compile_argsfile".format(filename_prefix) - file_name = ext.value + ("-abs.argsfile" if use_absolute_paths else ".argsfile") - argsfile, _ = ctx.actions.write(file_name, shell_quoted_args, allow_args = True, absolute = use_absolute_paths) + # For Xcode to parse argsfiles of argsfiles, the paths in the former must be absolute. 
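# For orientation, a sketch of the layering this produces (the file names come
# from the format strings above; paths are illustrative only): the top-level
# argsfile holds nothing but @-references to the per-category argsfiles, e.g.
# for ext == ".cpp" in Xcode mode:
#
#   .cpp.xcode_cxx_compile_argsfile:
#       @/abs/path/.cpp.xcode_toolchain_cxx_args
#       @/abs/path/.cpp.xcode_deps_cxx_args
#       @/abs/path/.cpp.xcode_target_cxx_args
#       @/abs/path/.cpp.xcode_file_prefix_cxx_args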
+ argsfile, _ = ctx.actions.write(file_name, file_args, allow_args = True, absolute = is_xcode_argsfile) - input_args = [args] + input_args = [args, file_args] - cmd_form = cmd_args(argsfile, format = "@{}").hidden(input_args) + format = "-@{}" if is_nasm else "@{}" + cmd_form = cmd_args(argsfile, format = format, hidden = input_args) return CompileArgsfile( file = argsfile, cmd_form = cmd_form, input_args = input_args, - args = shell_quoted_args, + args = args, args_without_file_prefix_args = args_without_file_prefix_args, ) -def _attr_compiler_flags(ctx: AnalysisContext, ext: str) -> list[typing.Any]: +def _compiler_flags(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, ext: str) -> list[typing.Any]: return ( - cxx_by_language_ext(ctx.attrs.lang_compiler_flags, ext) + - flatten(cxx_by_platform(ctx, ctx.attrs.platform_compiler_flags)) + - flatten(cxx_by_platform(ctx, cxx_by_language_ext(ctx.attrs.lang_platform_compiler_flags, ext))) + + cxx_by_language_ext(impl_params.lang_compiler_flags, ext) + + flatten(cxx_by_platform(ctx, impl_params.platform_compiler_flags)) + + flatten(cxx_by_platform(ctx, cxx_by_language_ext(impl_params.lang_platform_compiler_flags, ext))) + # ctx.attrs.compiler_flags need to come last to preserve buck1 ordering, this prevents compiler # flags ordering-dependent build errors - ctx.attrs.compiler_flags + impl_params.compiler_flags + ) + +def _preprocessor_flags(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, ext: str) -> list[typing.Any]: + return ( + impl_params.preprocessor_flags + + cxx_by_language_ext(impl_params.lang_preprocessor_flags, ext) + + flatten(cxx_by_platform(ctx, impl_params.platform_preprocessor_flags)) + + flatten(cxx_by_platform(ctx, cxx_by_language_ext(impl_params.lang_platform_preprocessor_flags, ext))) + ) + +def _mk_header_units_argsfile( + ctx: AnalysisContext, + compiler_info: typing.Any, + preprocessor: CPreprocessorInfo, + name: str, + ext: CxxExtension) -> CompileArgsfile | None: + """ + Generate and return an argsfile artifact containing all header unit options, and + command args that utilize the argsfile. + """ + if not preprocessor.set: + return None + if _get_category(ext) != "cxx_compile": + return None + if not _compiler_supports_header_units(compiler_info): + return None + + file_name = "{}.{}.header_units_args".format(ext.value, name) + args = cmd_args() + args.add([ + # TODO(nml): We only support Clang 17+, which don't need/want the extra -f + # arguments when compiling C++20. Clang 15 is too buggy to work properly, but if + # you wanted to try, you would need the below options at the very least, to get + # started: + # "-fmodules", + # "-fno-implicit-modules", + # "-fno-implicit-module-maps", + "-Wno-experimental-header-units", + "-Wno-ambiguous-macro", + ]) + + # TODO(nml): Tag args with headers_tag.tag_artifacts() once -MD -MF reports correct + # usage of PCMs. 
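# Given the "public"/"private" call sites in _generate_base_compile_command
# below, the files written here end up named e.g. ".cpp.public.header_units_args"
# and ".cpp.private.header_units_args" (a sketch; the leading component follows
# ext.value), and each is handed to the compiler as a plain @argsfile.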
+ args.add(preprocessor.set.project_as_args("header_units_args")) + input_args = [args] + file_args = cmd_args(args, quote = "shell") + argsfile, _ = ctx.actions.write(file_name, file_args, allow_args = True) + cmd_form = cmd_args(argsfile, format = "@{}", hidden = input_args) + + return CompileArgsfile( + file = argsfile, + cmd_form = cmd_form, + input_args = input_args, + args = file_args, + args_without_file_prefix_args = args, ) def _get_dep_tracking_mode(toolchain: Provider, file_type: DepFileType) -> DepTrackingMode: @@ -679,3 +1264,50 @@ def _get_dep_tracking_mode(toolchain: Provider, file_type: DepFileType) -> DepTr return toolchain.cuda_dep_tracking_mode else: return DepTrackingMode("makefile") + +def _generate_base_compile_command( + ctx: AnalysisContext, + impl_params: CxxRuleConstructorParams, + pre: CPreprocessorInfo, + header_pre: CPreprocessorInfo, + headers_tag: ArtifactTag, + ext: CxxExtension) -> _CxxCompileCommand: + """ + Generate a common part of a compile command that is shared by all sources + with a given extension. + """ + toolchain = get_cxx_toolchain_info(ctx) + compiler_info = _get_compiler_info(toolchain, ext) + base_compile_cmd = _get_compile_base(toolchain, compiler_info) + category = _get_category(ext) + + headers_dep_files = None + dep_file_file_type_hint = _dep_file_type(ext) + if dep_file_file_type_hint != None and toolchain.use_dep_files: + tracking_mode = _get_dep_tracking_mode(toolchain, dep_file_file_type_hint) + mk_dep_files_flags = get_headers_dep_files_flags_factory(tracking_mode) + if mk_dep_files_flags: + headers_dep_files = _HeadersDepFiles( + processor = cmd_args(toolchain.internal_tools.dep_file_processor), + mk_flags = mk_dep_files_flags, + tag = headers_tag, + dep_tracking_mode = tracking_mode, + ) + + argsfile = _mk_argsfiles(ctx, impl_params, compiler_info, pre, ext, headers_tag, False) + xcode_argsfile = _mk_argsfiles(ctx, impl_params, compiler_info, pre, ext, headers_tag, True) + header_units_argsfile = _mk_header_units_argsfile(ctx, compiler_info, header_pre, "public", ext) + private_header_units_argsfile = _mk_header_units_argsfile(ctx, compiler_info, pre, "private", ext) + + allow_cache_upload = cxx_attrs_get_allow_cache_upload(ctx.attrs, default = compiler_info.allow_cache_upload) + return _CxxCompileCommand( + base_compile_cmd = base_compile_cmd, + argsfile = argsfile, + xcode_argsfile = xcode_argsfile, + header_units_argsfile = header_units_argsfile, + private_header_units_argsfile = private_header_units_argsfile, + headers_dep_files = headers_dep_files, + compiler_type = compiler_info.compiler_type, + category = category, + allow_cache_upload = allow_cache_upload, + ) diff --git a/prelude/cxx/compiler.bzl b/prelude/cxx/compiler.bzl index 37967f4e2d6..cad51cf3a1f 100644 --- a/prelude/cxx/compiler.bzl +++ b/prelude/cxx/compiler.bzl @@ -58,7 +58,9 @@ def tree_style_cc_dep_files( _actions: AnalysisActions, _filename_base: str, input_file: Artifact) -> (cmd_args, cmd_args): - return (cmd_args(input_file), cmd_args(["-H"])) + # If we use color diagnostics, then error messages come through in color, which messes up parsing of the + # -H output in `show_headers_to_dep_file.py`. So make sure to pass -fno-color-diagnostics. 
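# For context, the two cmd_args returned below are spliced into the dep-file
# wrapper invocation assembled in compile.bzl, which takes roughly this shape
# (placeholders illustrative):
#
#   <dep_file_processor> <dep_tracking_mode> <input_file> <tagged_dep_file> \
#       <compiler> @argsfile ... -H -fno-color-diagnostics -c src.cpp -o src.o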
+ return (cmd_args(input_file), cmd_args(["-H", "-fno-color-diagnostics"])) def windows_cc_dep_files( _actions: AnalysisActions, diff --git a/prelude/cxx/cxx.bzl b/prelude/cxx/cxx.bzl index 0be9df6c8bd..e9eed25e859 100644 --- a/prelude/cxx/cxx.bzl +++ b/prelude/cxx/cxx.bzl @@ -11,12 +11,12 @@ load( "merge_android_packageable_info", ) load("@prelude//apple:resource_groups.bzl", "create_resource_graph") -load( - "@prelude//apple:xcode.bzl", - "get_project_root_file", -) load("@prelude//cxx:cxx_sources.bzl", "get_srcs_with_flags") load("@prelude//cxx:cxx_utility.bzl", "cxx_attrs_get_allow_cache_upload") +load( + "@prelude//cxx:link_groups_types.bzl", + "LinkGroupInfo", # @unused Used as a type +) load("@prelude//linking:execution_preference.bzl", "LinkExecutionPreference") load( "@prelude//linking:link_groups.bzl", @@ -33,7 +33,6 @@ load( "LinkInfo", "LinkInfos", "LinkStrategy", - "Linkage", "LinkedObject", "SharedLibLinkable", "create_merged_link_info", @@ -56,13 +55,28 @@ load( "@prelude//linking:linkables.bzl", "linkables", ) -load("@prelude//linking:shared_libraries.bzl", "SharedLibraryInfo", "create_shared_libraries", "merge_shared_libraries") +load( + "@prelude//linking:shared_libraries.bzl", + "SharedLibraries", + "SharedLibraryInfo", + "create_shlib_from_ctx", + "extract_soname_from_shlib", + "merge_shared_libraries", + "to_soname", +) load("@prelude//linking:strip.bzl", "strip_debug_info") +load("@prelude//linking:types.bzl", "Linkage") load("@prelude//os_lookup:defs.bzl", "OsLookup") +load("@prelude//python:manifest.bzl", "create_manifest_for_entries") load( "@prelude//tests:re_utils.bzl", "get_re_executors_from_props", ) +load( + "@prelude//third-party:build.bzl", + "create_third_party_build_info", +) +load("@prelude//unix:providers.bzl", "UnixEnv", "create_unix_env_info") load("@prelude//utils:expect.bzl", "expect") load( "@prelude//utils:utils.bzl", @@ -94,8 +108,8 @@ load( "CxxRuleSubTargetParams", ) load( - ":groups.bzl", - "Group", # @unused Used as a type + ":groups_types.bzl", + "Group", "MATCH_ALL_LABEL", "NO_MATCH_LABEL", ) @@ -110,7 +124,6 @@ load( ) load( ":link_groups.bzl", - "LinkGroupInfo", # @unused Used as a type "LinkGroupLibSpec", "get_link_group_info", ) @@ -179,7 +192,9 @@ def cxx_library_impl(ctx: AnalysisContext) -> list[Provider]: sub_target_params, provider_params = _get_params_for_android_binary_cxx_library() else: sub_target_params = CxxRuleSubTargetParams() - provider_params = CxxRuleProviderParams() + provider_params = CxxRuleProviderParams( + third_party_build = True, + ) params = CxxRuleConstructorParams( rule_type = "cxx_library", @@ -188,6 +203,17 @@ def cxx_library_impl(ctx: AnalysisContext) -> list[Provider]: output_style_sub_targets_and_providers_factory = _get_shared_link_style_sub_targets_and_providers, generate_sub_targets = sub_target_params, generate_providers = provider_params, + compiler_flags = ctx.attrs.compiler_flags, + lang_compiler_flags = ctx.attrs.lang_compiler_flags, + platform_compiler_flags = ctx.attrs.platform_compiler_flags, + lang_platform_compiler_flags = ctx.attrs.lang_platform_compiler_flags, + preprocessor_flags = ctx.attrs.preprocessor_flags, + lang_preprocessor_flags = ctx.attrs.lang_preprocessor_flags, + platform_preprocessor_flags = ctx.attrs.platform_preprocessor_flags, + lang_platform_preprocessor_flags = ctx.attrs.lang_platform_preprocessor_flags, + use_header_units = ctx.attrs.use_header_units, + export_header_unit = ctx.attrs.export_header_unit, + export_header_unit_filter = ctx.attrs.export_header_unit_filter, ) 
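# A minimal sketch of a target opting into the header-unit plumbing wired up
# above (hypothetical target and filter value; the attribute names and the
# "preload" mode are the ones read by this diff):
#
#   cxx_library(
#       name = "fmt",
#       srcs = ["src/format.cc"],
#       exported_headers = glob(["include/**/*.h"]),
#       use_header_units = True,
#       export_header_unit = "preload",
#       export_header_unit_filter = ["include/fmt/"],
#   )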
output = cxx_library_parameterized(ctx, params) return output.providers @@ -239,6 +265,15 @@ def cxx_binary_impl(ctx: AnalysisContext) -> list[Provider]: prefer_stripped_objects = ctx.attrs.prefer_stripped_objects, exe_allow_cache_upload = cxx_attrs_get_allow_cache_upload(ctx.attrs), extra_link_roots = linkables(ctx.attrs.link_group_deps), + compiler_flags = ctx.attrs.compiler_flags, + lang_compiler_flags = ctx.attrs.lang_compiler_flags, + platform_compiler_flags = ctx.attrs.platform_compiler_flags, + lang_platform_compiler_flags = ctx.attrs.lang_platform_compiler_flags, + preprocessor_flags = ctx.attrs.preprocessor_flags, + lang_preprocessor_flags = ctx.attrs.lang_preprocessor_flags, + platform_preprocessor_flags = ctx.attrs.platform_preprocessor_flags, + lang_platform_preprocessor_flags = ctx.attrs.lang_platform_preprocessor_flags, + use_header_units = ctx.attrs.use_header_units, ) output = cxx_executable(ctx, params) @@ -248,6 +283,28 @@ def cxx_binary_impl(ctx: AnalysisContext) -> list[Provider]: if output.sanitizer_runtime_files: extra_providers.append(CxxSanitizerRuntimeInfo(runtime_files = output.sanitizer_runtime_files)) + # Unix env provider. + extra_providers.append( + create_unix_env_info( + actions = ctx.actions, + env = UnixEnv( + label = ctx.label, + binaries = [ + create_manifest_for_entries( + ctx = ctx, + name = "unix_env", + entries = [ + (ctx.label.name, output.binary, ""), + ], + ), + ], + ), + # TODO(agallagher): We only want to traverse deps when dynamically + # linking. + #deps = ctx.attrs.deps, + ), + ) + # When an executable is the output of a build, also materialize all the # unpacked external debuginfo that goes with it. This makes `buck2 build # :main` equivalent to `buck2 build :main :main[debuginfo]`. @@ -272,7 +329,7 @@ def cxx_binary_impl(ctx: AnalysisContext) -> list[Provider]: other_outputs = other_outputs, sub_targets = output.sub_targets, ), - RunInfo(args = cmd_args(output.binary).hidden(output.runtime_files)), + RunInfo(args = cmd_args(output.binary, hidden = output.runtime_files)), output.compilation_db, output.xcode_data, output.dist_info, @@ -359,31 +416,37 @@ def prebuilt_cxx_library_impl(ctx: AnalysisContext) -> list[Provider]: # Prepare the stripped static lib. static_lib_stripped = None - if static_lib != None: + if not ctx.attrs.prestripped and static_lib != None: static_lib_stripped = strip_debug_info(ctx, static_lib.short_path, static_lib) # Prepare the stripped static PIC lib. If the static PIC lib is the same # artifact as the static lib, then just re-use the stripped static lib. static_pic_lib_stripped = None - if static_lib == static_pic_lib: - static_pic_lib_stripped = static_lib_stripped - elif static_pic_lib != None: - static_pic_lib_stripped = strip_debug_info(ctx, static_pic_lib.short_path, static_pic_lib) + if not ctx.attrs.prestripped: + if static_lib == static_pic_lib: + static_pic_lib_stripped = static_lib_stripped + elif static_pic_lib != None: + static_pic_lib_stripped = strip_debug_info(ctx, static_pic_lib.short_path, static_pic_lib) if ctx.attrs.soname != None: soname = get_shared_library_name_for_param(linker_info, ctx.attrs.soname) + elif shared_lib != None and ctx.attrs.extract_soname: + soname = extract_soname_from_shlib( + actions = ctx.actions, + name = "__soname__.txt", + shared_lib = shared_lib, + ) else: soname = get_shared_library_name(linker_info, ctx.label.name, apply_default_prefix = True) + soname = to_soname(soname) # Use ctx.attrs.deps instead of cxx_attr_deps, since prebuilt rules don't have platform_deps. 
first_order_deps = ctx.attrs.deps exported_first_order_deps = cxx_attr_exported_deps(ctx) - project_root_file = get_project_root_file(ctx) - # Exported preprocessor info. inherited_pp_infos = cxx_inherited_preprocessor_infos(exported_first_order_deps) - generic_exported_pre = cxx_exported_preprocessor_info(ctx, cxx_get_regular_cxx_headers_layout(ctx), project_root_file, []) + generic_exported_pre = cxx_exported_preprocessor_info(ctx, cxx_get_regular_cxx_headers_layout(ctx), []) args = [] compiler_type = get_cxx_toolchain_info(ctx).cxx_compiler_info.compiler_type if header_dirs != None: @@ -391,12 +454,13 @@ def prebuilt_cxx_library_impl(ctx: AnalysisContext) -> list[Provider]: args.append(format_system_include_arg(cmd_args(x), compiler_type)) exported_items = [generic_exported_pre] if args: - exported_items.append(CPreprocessor(relative_args = CPreprocessorArgs(args = args))) - providers.append(cxx_merge_cpreprocessors( + exported_items.append(CPreprocessor(args = CPreprocessorArgs(args = args))) + propagated_preprocessor = cxx_merge_cpreprocessors( ctx, exported_items, inherited_pp_infos, - )) + ) + providers.append(propagated_preprocessor) inherited_link = cxx_inherited_link_info(first_order_deps) inherited_exported_link = cxx_inherited_link_info(exported_first_order_deps) @@ -406,7 +470,7 @@ def prebuilt_cxx_library_impl(ctx: AnalysisContext) -> list[Provider]: # Gather link infos, outputs, and shared libs for effective link style. outputs = {} libraries = {} - solibs = {} + solibs = [] sub_targets = {} for output_style in get_output_styles_for_linkage(preferred_linkage): out = None @@ -421,19 +485,21 @@ def prebuilt_cxx_library_impl(ctx: AnalysisContext) -> list[Provider]: archive = Archive(artifact = lib), linker_type = linker_type, link_whole = ctx.attrs.link_whole, + # prebuilt archives are compiled to object code instead of IR + supports_lto = False, ) if output_style == LibOutputStyle("archive"): if static_lib: out = static_lib linkable = archive_linkable(static_lib) - linkable_stripped = archive_linkable(static_lib_stripped) + linkable_stripped = archive_linkable(static_lib_stripped) if static_lib_stripped else None elif output_style == LibOutputStyle("pic_archive"): lib = static_pic_lib or static_lib if lib: out = lib linkable = archive_linkable(lib) - linkable_stripped = archive_linkable(static_pic_lib_stripped or static_lib_stripped) + linkable_stripped = archive_linkable(static_pic_lib_stripped or static_lib_stripped) if (static_pic_lib_stripped or static_lib_stripped) else None else: # shared # If no shared library was provided, link one from the static libraries. if shared_lib != None: @@ -449,8 +515,8 @@ def prebuilt_cxx_library_impl(ctx: AnalysisContext) -> list[Provider]: shlink_args.extend(get_link_whole_args(linker_type, [lib])) link_result = cxx_link_shared_library( ctx = ctx, - output = soname, - name = soname, + output = soname.ensure_str(), + name = soname.ensure_str(), opts = link_options( links = [ LinkArgs(flags = shlink_args), @@ -491,12 +557,18 @@ def prebuilt_cxx_library_impl(ctx: AnalysisContext) -> list[Provider]: # Provided means something external to the build will provide # the libraries, so we don't need to propagate anything. if not ctx.attrs.provided: - solibs[soname] = shared_lib + solibs.append( + create_shlib_from_ctx( + ctx = ctx, + lib = shared_lib, + soname = soname, + ), + ) # Provide a sub-target that always provides the shared lib # using the soname. 
-            if soname and shared_lib.output.basename != paths.basename(soname):
-                soname_lib = ctx.actions.copy_file(soname, shared_lib.output)
+            if soname and soname.is_str and shared_lib.output.basename != paths.basename(soname.ensure_str()):
+                soname_lib = ctx.actions.copy_file(soname.ensure_str(), shared_lib.output)
             else:
                 soname_lib = shared_lib.output
             sub_targets["soname-lib"] = [DefaultInfo(default_output = soname_lib)]
@@ -528,16 +600,8 @@ def prebuilt_cxx_library_impl(ctx: AnalysisContext) -> list[Provider]:
             default_output = outputs[output_style],
         )]
 
-    # Create the default output for the library rule given its link style and preferred linkage
     cxx_toolchain = get_cxx_toolchain_info(ctx)
     pic_behavior = cxx_toolchain.pic_behavior
-    link_strategy = to_link_strategy(cxx_toolchain.linker_info.link_style)
-    actual_output_style = get_lib_output_style(link_strategy, preferred_linkage, pic_behavior)
-    output = outputs[actual_output_style]
-    providers.append(DefaultInfo(
-        default_output = output,
-        sub_targets = sub_targets,
-    ))
 
     # Propagate link info provider.
     providers.append(create_merged_link_info(
@@ -553,19 +617,60 @@ def prebuilt_cxx_library_impl(ctx: AnalysisContext) -> list[Provider]:
     ))
 
     # Propagate shared libraries up the tree.
+    shared_libs = SharedLibraries(libraries = solibs)
     providers.append(merge_shared_libraries(
         ctx.actions,
-        create_shared_libraries(ctx, solibs),
+        shared_libs,
         filter(None, [x.get(SharedLibraryInfo) for x in exported_first_order_deps]),
     ))
 
+    providers.append(
+        create_unix_env_info(
+            actions = ctx.actions,
+            env = UnixEnv(
+                label = ctx.label,
+                native_libs = [shared_libs],
+            ),
+            deps = ctx.attrs.deps + ctx.attrs.exported_deps,
+        ),
+    )
+
+    # Third-party provider.
+    third_party_build_info = create_third_party_build_info(
+        ctx = ctx,
+        paths = [] if header_dirs == None else [(d.short_path, d) for d in header_dirs],
+        cxx_headers = [propagated_preprocessor],
+        shared_libs = shared_libs.libraries,
+        cxx_header_dirs = ["include"] + ([] if header_dirs == None else [d.short_path for d in header_dirs]),
+        deps = ctx.attrs.deps + cxx_attr_exported_deps(ctx),
+    )
+    providers.append(third_party_build_info)
+    sub_targets["third-party-build"] = [
+        DefaultInfo(
+            default_output = third_party_build_info.build.root.artifact,
+            sub_targets = dict(
+                manifest = [DefaultInfo(default_output = third_party_build_info.build.manifest)],
+            ),
+        ),
+    ]
+
+    # Create the default output for the library rule given its link style and preferred linkage
+    link_strategy = to_link_strategy(cxx_toolchain.linker_info.link_style)
+    actual_output_style = get_lib_output_style(link_strategy, preferred_linkage, pic_behavior)
+    output = outputs[actual_output_style]
+    providers.append(DefaultInfo(
+        default_output = output,
+        sub_targets = sub_targets,
+    ))
+
     # Omnibus root provider.
- if LibOutputStyle("pic_archive") in libraries and (static_pic_lib or static_lib) and not ctx.attrs.header_only: + if LibOutputStyle("pic_archive") in libraries and (static_pic_lib or static_lib) and not ctx.attrs.header_only and soname.is_str: # TODO(cjhopman): This doesn't support thin archives linkable_root = create_linkable_root( - name = soname, + label = ctx.label, + name = soname.ensure_str(), link_infos = LinkInfos(default = LinkInfo( - name = soname, + name = soname.ensure_str(), pre_flags = ( linker_flags.exported_flags + linker_flags.flags @@ -593,14 +698,14 @@ def prebuilt_cxx_library_impl(ctx: AnalysisContext) -> list[Provider]: ctx, linkable_node = create_linkable_node( ctx = ctx, - default_soname = soname, + default_soname = soname.as_str(), preferred_linkage = preferred_linkage, default_link_strategy = to_link_strategy(cxx_toolchain.linker_info.link_style), exported_deps = exported_first_order_deps, # If we don't have link input for this link style, we pass in `None` so # that omnibus knows to avoid it. link_infos = libraries, - shared_libs = solibs, + shared_libs = shared_libs, linker_flags = linker_flags, can_be_asset = getattr(ctx.attrs, "can_be_asset", False) or False, ), @@ -652,10 +757,18 @@ def cxx_test_impl(ctx: AnalysisContext) -> list[Provider]: auto_link_group_specs = get_auto_link_group_specs(ctx, link_group_info), prefer_stripped_objects = ctx.attrs.prefer_stripped_objects, extra_link_roots = linkables(ctx.attrs.link_group_deps), + compiler_flags = ctx.attrs.compiler_flags, + lang_compiler_flags = ctx.attrs.lang_compiler_flags, + platform_compiler_flags = ctx.attrs.platform_compiler_flags, + lang_platform_compiler_flags = ctx.attrs.lang_platform_compiler_flags, + preprocessor_flags = ctx.attrs.preprocessor_flags, + lang_preprocessor_flags = ctx.attrs.lang_preprocessor_flags, + platform_preprocessor_flags = ctx.attrs.platform_preprocessor_flags, + lang_platform_preprocessor_flags = ctx.attrs.lang_platform_preprocessor_flags, ) output = cxx_executable(ctx, params, is_cxx_test = True) - command = [cmd_args(output.binary).hidden(output.runtime_files)] + ctx.attrs.args + command = [cmd_args(output.binary, hidden = output.runtime_files)] + ctx.attrs.args # Setup RE executors based on the `remote_execution` param. 
     re_executor, executor_overrides = get_re_executors_from_props(ctx)
diff --git a/prelude/cxx/cxx_bolt.bzl b/prelude/cxx/cxx_bolt.bzl
index 271ab5e804a..b355f1ae3cc 100644
--- a/prelude/cxx/cxx_bolt.bzl
+++ b/prelude/cxx/cxx_bolt.bzl
@@ -8,30 +8,37 @@
 # BOLT (Binary Optimization and Layout Tool) is a post-link profile-guided optimizer used for
 # performance-critical services in fbcode: https://www.internalfb.com/intern/wiki/HHVM-BOLT/
+load(
+    "@prelude//:artifact_tset.bzl",
+    "ArtifactTSet",
+    "project_artifacts",
+)
 load(":cxx_context.bzl", "get_cxx_toolchain_info")
 
 def cxx_use_bolt(ctx: AnalysisContext) -> bool:
     cxx_toolchain_info = get_cxx_toolchain_info(ctx)
     return cxx_toolchain_info.bolt_enabled and ctx.attrs.bolt_profile != None
 
-def bolt(ctx: AnalysisContext, prebolt_output: Artifact, identifier: [str, None]) -> Artifact:
+def bolt(ctx: AnalysisContext, prebolt_output: Artifact, external_debug_info: ArtifactTSet, identifier: [str, None]) -> Artifact:
     output_name = prebolt_output.short_path.removesuffix("-wrapper")
     postbolt_output = ctx.actions.declare_output(output_name)
     bolt_msdk = get_cxx_toolchain_info(ctx).binary_utilities_info.bolt_msdk
 
     if not bolt_msdk or not cxx_use_bolt(ctx):
         fail("Cannot use bolt if bolt_msdk is not available or bolt profile is not available")
-    args = cmd_args()
+
+    materialized_external_debug_info = project_artifacts(ctx.actions, [external_debug_info])
 
     # bolt command format:
     # {llvm_bolt} {input_bin} -o $OUT -data={fdata} {args}
-    args.add(
+    args = cmd_args(
         cmd_args(bolt_msdk, format = "{}/bin/llvm-bolt"),
         prebolt_output,
         "-o",
         postbolt_output.as_output(),
         cmd_args(ctx.attrs.bolt_profile, format = "-data={}"),
         ctx.attrs.bolt_flags,
+        hidden = materialized_external_debug_info,
     )
 
     ctx.actions.run(
@@ -41,4 +48,23 @@ def bolt(ctx: AnalysisContext, prebolt_output: Artifact, identifier: [str, None]
         local_only = get_cxx_toolchain_info(ctx).linker_info.link_binaries_locally,
     )
 
-    return postbolt_output
+    output = postbolt_output
+
+    if hasattr(ctx.attrs, "strip_stapsdt") and ctx.attrs.strip_stapsdt:
+        stripped_postbolt_output = ctx.actions.declare_output(output_name + "-nostapsdt")
+        ctx.actions.run(
+            # We --rename-section instead of --remove-section because objcopy's processing
+            # results in an invalid ELF file
+            cmd_args([
+                get_cxx_toolchain_info(ctx).binary_utilities_info.objcopy,
+                "--rename-section",
+                ".stapsdt.base=.deleted_stapsdt_base_section",
+                postbolt_output,
+                stripped_postbolt_output.as_output(),
+            ]),
+            category = "bolt_strip_stapsdt",
+            identifier = identifier,
+        )
+        output = stripped_postbolt_output
+
+    return output
diff --git a/prelude/cxx/cxx_executable.bzl b/prelude/cxx/cxx_executable.bzl
index 078f3fa51b0..b4649ec8efb 100644
--- a/prelude/cxx/cxx_executable.bzl
+++ b/prelude/cxx/cxx_executable.bzl
@@ -23,20 +23,22 @@ load(
     "apple_create_frameworks_linkable",
     "apple_get_link_info_by_deduping_link_infos",
 )
-load(
-    "@prelude//apple:xcode.bzl",
-    "get_project_root_file",
-)
 load(
     "@prelude//cxx:cxx_bolt.bzl",
     "cxx_use_bolt",
 )
+load(
+    "@prelude//cxx:link_groups_types.bzl",
+    "LinkGroupsDebugLinkInfo",
+    "LinkGroupsDebugLinkableItem",
+)
 load(
     "@prelude//dist:dist_info.bzl",
     "DistInfo",
 )
 load(
     "@prelude//ide_integrations:xcode.bzl",
+    "XCODE_ARGSFILES_SUB_TARGET",
     "XCODE_DATA_SUB_TARGET",
     "XcodeDataInfo",
     "generate_xcode_data",
@@ -69,6 +71,7 @@ load(
 )
 load(
     "@prelude//linking:shared_libraries.bzl",
+    "SharedLibrary",  # @unused Used as a type
     "merge_shared_libraries",
     "traverse_shared_library_info",
 )
@@ -81,7 +84,6 @@ load(
 )
 load(
":argsfiles.bzl", - "ABS_ARGSFILES_SUBTARGET", "ARGSFILES_SUBTARGET", "get_argsfiles_output", ) @@ -93,10 +95,16 @@ load( ) load( ":compile.bzl", + "CxxCompileFlavor", "compile_cxx", "create_compile_cmds", + "cxx_objects_sub_targets", ) load(":cxx_context.bzl", "get_cxx_platform_info", "get_cxx_toolchain_info") +load( + ":cxx_instrumentation.bzl", + "is_coverage_enabled_by_any_dep", +) load( ":cxx_library_utility.bzl", "OBJECTS_SUBTARGET", @@ -105,7 +113,6 @@ load( "cxx_attr_linker_flags", "cxx_attr_resources", "cxx_is_gnu", - "cxx_objects_sub_targets", ) load( ":cxx_link_utility.bzl", @@ -115,6 +122,8 @@ load( ":cxx_types.bzl", "CxxRuleConstructorParams", # @unused Used as a type ) +load(":diagnostics.bzl", "check_sub_target") +load(":groups.bzl", "get_dedupped_roots_from_groups") load( ":link.bzl", "CxxLinkerMapData", @@ -122,10 +131,13 @@ load( ) load( ":link_groups.bzl", + "FinalLabelsToLinks", "LINK_GROUP_MAPPINGS_FILENAME_SUFFIX", "LINK_GROUP_MAPPINGS_SUB_TARGET", "LINK_GROUP_MAP_DATABASE_SUB_TARGET", "LinkGroupContext", + "build_shared_libs_for_symlink_tree", + "create_debug_linkable_entries", "create_link_groups", "find_relevant_roots", "get_filtered_labels_to_links_map", @@ -134,8 +146,8 @@ load( "get_link_group", "get_link_group_map_json", "get_link_group_preferred_linkage", + "get_public_link_group_nodes", "get_transitive_deps_matching_labels", - "is_link_group_shlib", ) load( ":link_types.bzl", @@ -150,6 +162,7 @@ load( "PDB_SUB_TARGET", "get_dumpbin_providers", "get_pdb_providers", + "get_shared_library_name", ) load( ":preprocessor.bzl", @@ -160,11 +173,11 @@ load( CxxExecutableOutput = record( binary = Artifact, unstripped_binary = Artifact, - bitcode_bundle = field([Artifact, None], None), - dwp = field([Artifact, None]), + bitcode_bundle = field(Artifact | None, None), + dwp = field(Artifact | None), # Files that must be present for the executable to run successfully. These # are always materialized, whether the executable is the output of a build - # or executed as a host tool. They become .hidden() arguments when executing + # or executed as a host tool. They become hidden= arguments when executing # the executable via RunInfo(). runtime_files = list[ArgLike], sub_targets = dict[str, list[DefaultInfo]], @@ -176,7 +189,7 @@ CxxExecutableOutput = record( # materialized when this executable is the output of a build, not when it is # used by other rules. They become other_outputs on DefaultInfo. external_debug_info_artifacts = list[TransitiveSetArgsProjection], - shared_libs = dict[str, LinkedObject], + shared_libs = list[SharedLibrary], # All link group links that were generated in the executable. auto_link_groups = field(dict[str, LinkedObject], {}), compilation_db = CxxCompilationDbInfo, @@ -185,17 +198,15 @@ CxxExecutableOutput = record( link_command_debug_output = field([LinkCommandDebugOutput, None], None), dist_info = DistInfo, sanitizer_runtime_files = field(list[Artifact], []), + index_stores = field(list[Artifact], []), ) def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, is_cxx_test: bool = False) -> CxxExecutableOutput: - project_root_file = get_project_root_file(ctx) - # Gather preprocessor inputs. 
     preprocessor_deps = cxx_attr_deps(ctx) + filter(None, [ctx.attrs.precompiled_header])
     (own_preprocessor_info, test_preprocessor_infos) = cxx_private_preprocessor_info(
         ctx,
         impl_params.headers_layout,
-        project_root_file = project_root_file,
         raw_headers = ctx.attrs.raw_headers,
         extra_preprocessors = impl_params.extra_preprocessors,
         non_exported_deps = preprocessor_deps,
@@ -214,13 +225,29 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams,
         impl_params,
         [own_preprocessor_info] + test_preprocessor_infos,
         inherited_preprocessor_infos,
+        is_coverage_enabled_by_any_dep(ctx, preprocessor_deps),
+    )
+    compile_flavor = CxxCompileFlavor("pic") if link_strategy != LinkStrategy("static") else CxxCompileFlavor("default")
+    cxx_outs = compile_cxx(
+        ctx = ctx,
+        src_compile_cmds = compile_cmd_output.src_compile_cmds,
+        flavor = compile_flavor,
+        provide_syntax_only = True,
+        use_header_units = impl_params.use_header_units,
     )
-    cxx_outs = compile_cxx(ctx, compile_cmd_output.src_compile_cmds, pic = link_strategy != LinkStrategy("static"))
-    sub_targets[ARGSFILES_SUBTARGET] = [get_argsfiles_output(ctx, compile_cmd_output.argsfiles.relative, "argsfiles")]
-    sub_targets[ABS_ARGSFILES_SUBTARGET] = [get_argsfiles_output(ctx, compile_cmd_output.argsfiles.absolute, "abs-argsfiles")]
+    sub_targets[ARGSFILES_SUBTARGET] = [get_argsfiles_output(ctx, compile_cmd_output.argsfiles.relative, ARGSFILES_SUBTARGET)]
+    sub_targets[XCODE_ARGSFILES_SUB_TARGET] = [get_argsfiles_output(ctx, compile_cmd_output.argsfiles.xcode, XCODE_ARGSFILES_SUB_TARGET)]
     sub_targets[OBJECTS_SUBTARGET] = [DefaultInfo(sub_targets = cxx_objects_sub_targets(cxx_outs))]
 
+    diagnostics = {
+        compile_cmd.src.short_path: out.diagnostics
+        for compile_cmd, out in zip(compile_cmd_output.src_compile_cmds, cxx_outs)
+        if out.diagnostics != None
+    }
+    if len(diagnostics) > 0:
+        sub_targets["check"] = check_sub_target(ctx, diagnostics)
+
     # Compilation DB.
     comp_db = create_compilation_database(ctx, compile_cmd_output.src_compile_cmds, "compilation-database")
     sub_targets["compilation-database"] = [comp_db]
@@ -232,6 +259,9 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams,
     # comp_db_compile_cmds can include header files being compiled as C++ which should not be exposed in the [compilation-database] subtarget
     comp_db_info = make_compilation_db_info(compile_cmd_output.comp_db_compile_cmds, get_cxx_toolchain_info(ctx), get_cxx_platform_info(ctx))
 
+    # Index Stores created by cxx compile
+    index_stores = [out.index_store for out in cxx_outs if out.index_store]
+
     # Link deps
     link_deps = linkables(cxx_attr_deps(ctx)) + impl_params.extra_link_deps
 
@@ -281,8 +311,13 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams,
 
     # Link group libs.
     link_group_libs = {}
+
+    # Maps each target label to the link group that consumed it
+    targets_consumed_by_link_groups = {}
     auto_link_groups = {}
-    labels_to_links_map = {}
+    labels_to_links = FinalLabelsToLinks(
+        map = {},
+    )
 
     if not link_group_mappings:
         # We cannot support deriving link execution preference off the included links, as we've already
@@ -312,27 +347,39 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams,
     # If we're using auto-link-groups, where we generate the link group links
     # in the prelude, the link group map will give us the link group libs.
     # Otherwise, pull them from the `LinkGroupLibInfo` provider from our deps.
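# For orientation, a link group mapping (as consumed via get_link_group_info)
# pairs a group name with traversal rules over the target graph. A hedged
# sketch only; the label is hypothetical and the exact tuple shape may differ
# from the prelude's current link_group_map definition:
#
#   link_group_map = [
#       # group name, then (root target, traversal, optional filter, optional linkage)
#       ("malloc", [("//third-party/jemalloc:jemalloc", "tree", None, None)]),
#   ]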
+ + public_link_group_nodes = get_public_link_group_nodes( + linkable_graph_node_map, + link_group_mappings, + exec_dep_roots + link_group_extra_link_roots, + link_group, + ) + link_group_libs_debug_info = {} if impl_params.auto_link_group_specs != None: linked_link_groups = create_link_groups( ctx = ctx, link_groups = link_groups, + link_strategy = link_strategy, link_group_mappings = link_group_mappings, link_group_preferred_linkage = link_group_preferred_linkage, executable_deps = exec_dep_roots, linker_flags = own_link_flags, link_group_specs = impl_params.auto_link_group_specs, - root_link_group = link_group, linkable_graph_node_map = linkable_graph_node_map, other_roots = link_group_extra_link_roots, prefer_stripped_objects = impl_params.prefer_stripped_objects, anonymous = ctx.attrs.anonymous_link_groups, allow_cache_upload = impl_params.exe_allow_cache_upload, + public_nodes = public_link_group_nodes, + error_handler = impl_params.error_handler, ) + link_group_libs_debug_info = linked_link_groups.libs_debug_info for name, linked_link_group in linked_link_groups.libs.items(): auto_link_groups[name] = linked_link_group.artifact if linked_link_group.library != None: link_group_libs[name] = linked_link_group.library own_binary_link_flags += linked_link_groups.symbol_ldflags + targets_consumed_by_link_groups = linked_link_groups.targets_consumed_by_link_groups else: # NOTE(agallagher): We don't use version scripts and linker scripts @@ -347,7 +394,8 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, # TODO(T110378098): Similar to shared libraries, we need to identify all the possible # scenarios for which we need to propagate up link info and simplify this logic. For now # base which links to use based on whether link groups are defined. 
- labels_to_links_map = get_filtered_labels_to_links_map( + labels_to_links = get_filtered_labels_to_links_map( + public_link_group_nodes, linkable_graph_node_map, link_group, link_groups, @@ -373,10 +421,24 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, force_static_follows_dependents = impl_params.link_groups_force_static_follows_dependents, ) + link_groups_debug_info = LinkGroupsDebugLinkInfo( + binary = LinkGroupsDebugLinkableItem( + ordered_linkables = create_debug_linkable_entries(labels_to_links.map, root = None), + ), + libs = link_group_libs_debug_info, + ) + sub_targets["link-groups-info"] = [DefaultInfo( + default_output = ctx.actions.write_json( + ctx.label.name + ".link-groups-info.json", + link_groups_debug_info, + ), + )] + if is_cxx_test and link_group != None: # if a cpp_unittest is part of the link group, we need to traverse through all deps # from the root again to ensure we link in gtest deps - labels_to_links_map = labels_to_links_map | get_filtered_labels_to_links_map( + labels_to_links_to_merge = get_filtered_labels_to_links_map( + public_link_group_nodes, linkable_graph_node_map, None, link_groups, @@ -388,6 +450,7 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, is_executable_link = True, prefer_stripped = impl_params.prefer_stripped_objects, ) + labels_to_links.map |= labels_to_links_to_merge.map # NOTE: Our Haskell DLL support impl currently links transitive haskell # deps needed by DLLs which get linked into the main executable as link- @@ -398,18 +461,13 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, public_nodes = get_transitive_deps_matching_labels( linkable_graph_node_map = linkable_graph_node_map, label = ctx.attrs.link_group_public_deps_label, - roots = [ - mapping.root - for group in link_group_info.groups.values() - for mapping in group.mappings - if mapping.root != None - ], + roots = get_dedupped_roots_from_groups(link_group_info.groups.values()), ) - filtered_links = get_filtered_links(labels_to_links_map, set(public_nodes)) - filtered_targets = get_filtered_targets(labels_to_links_map) + filtered_links = get_filtered_links(labels_to_links.map, set(public_nodes)) + filtered_targets = get_filtered_targets(labels_to_links.map) - link_execution_preference = get_resolved_cxx_binary_link_execution_preference(ctx, labels_to_links_map.keys(), impl_params.force_full_hybrid_if_capable) + link_execution_preference = get_resolved_cxx_binary_link_execution_preference(ctx, labels_to_links.map.keys(), impl_params.force_full_hybrid_if_capable) # Unfortunately, link_groups does not use MergedLinkInfo to represent the args # for the resolved nodes in the graph. @@ -421,15 +479,8 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, dep_links = LinkArgs(infos = filtered_links) sub_targets[LINK_GROUP_MAP_DATABASE_SUB_TARGET] = [get_link_group_map_json(ctx, filtered_targets)] - # Set up shared libraries symlink tree only when needed - shared_libs = {} - - # Add in extra, rule-specific shared libs. 
- for name, shlib in impl_params.extra_shared_libs.items(): - shared_libs[name] = shlib.lib - # Only setup a shared library symlink tree when shared linkage or link_groups is used - gnu_use_link_groups = cxx_is_gnu(ctx) and link_group_mappings + gnu_use_link_groups = cxx_is_gnu(ctx) and len(link_group_mappings) > 0 shlib_deps = [] if link_strategy == LinkStrategy("shared") or gnu_use_link_groups: shlib_deps = ( @@ -443,26 +494,25 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, link_group_mappings = link_group_mappings, link_group_libs = link_group_libs, link_group_preferred_linkage = link_group_preferred_linkage, - labels_to_links_map = labels_to_links_map, + labels_to_links_map = labels_to_links.map, + targets_consumed_by_link_groups = targets_consumed_by_link_groups, ) - def shlib_filter(_name, shared_lib): - return not gnu_use_link_groups or is_link_group_shlib(shared_lib.label, link_group_ctx) - - for name, shared_lib in traverse_shared_library_info(shlib_info, filter_func = shlib_filter).items(): - shared_libs[name] = shared_lib.lib - - if gnu_use_link_groups: - # When there are no matches for a pattern based link group, - # `link_group_mappings` will not have an entry associated with the lib. - for _name, link_group_lib in link_group_libs.items(): - shared_libs.update(link_group_lib.shared_libs) + # Set up shared libraries symlink tree only when needed + shared_libs = build_shared_libs_for_symlink_tree( + gnu_use_link_groups, + link_group_ctx, + link_strategy, + traverse_shared_library_info(shlib_info), + impl_params.extra_shared_libs, + ) toolchain_info = get_cxx_toolchain_info(ctx) linker_info = toolchain_info.linker_info links = [ LinkArgs(infos = [ LinkInfo( + dist_thin_lto_codegen_flags = getattr(ctx.attrs, "dist_thin_lto_codegen_flags", []), pre_flags = own_binary_link_flags, linkables = [ObjectsLinkable( objects = [out.object for out in cxx_outs] + impl_params.extra_link_input, @@ -487,14 +537,14 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, # hidden link args. if impl_params.extra_hidden: links.append( - LinkArgs(flags = cmd_args().hidden(impl_params.extra_hidden)), + LinkArgs(flags = cmd_args(hidden = impl_params.extra_hidden)), ) link_result = _link_into_executable( ctx, # If shlib lib tree generation is enabled, pass in the shared libs (which # will trigger the necessary link tree and link args). 
- shared_libs if impl_params.exe_shared_libs_link_tree else {}, + shared_libs if impl_params.exe_shared_libs_link_tree else [], impl_params.executable_name, linker_info.binary_extension, link_options( @@ -507,6 +557,7 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, strip_args_factory = impl_params.strip_args_factory, category_suffix = impl_params.exe_category_suffix, allow_cache_upload = impl_params.exe_allow_cache_upload, + error_handler = impl_params.error_handler, ), ) binary = link_result.exe @@ -521,7 +572,7 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, output = binary.output, populate_rule_specific_attributes_func = impl_params.cxx_populate_xcode_attributes_func, srcs = impl_params.srcs + impl_params.additional.srcs, - argsfiles = compile_cmd_output.argsfiles.absolute, + argsfiles = compile_cmd_output.argsfiles.xcode, product_name = get_cxx_executable_product_name(ctx), ) sub_targets[XCODE_DATA_SUB_TARGET] = xcode_data_default_info @@ -534,34 +585,30 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, sub_targets["rpath-tree"] = [DefaultInfo( default_output = shared_libs_symlink_tree, other_outputs = [ - lib.output - for lib in shared_libs.values() + shlib.lib.output + for shlib in shared_libs ] + [ - lib.dwp - for lib in shared_libs.values() - if lib.dwp + shlib.lib.dwp + for shlib in shared_libs + if shlib.lib.dwp ], )] - sub_targets["shared-libraries"] = [DefaultInfo( - default_output = ctx.actions.write_json( - binary.output.basename + ".shared-libraries.json", - { - "libraries": ["{}:{}[shared-libraries][{}]".format(ctx.label.path, ctx.label.name, name) for name in shared_libs.keys()], - "librariesdwp": ["{}:{}[shared-libraries][{}][dwp]".format(ctx.label.path, ctx.label.name, name) for name, lib in shared_libs.items() if lib.dwp], - "rpathtree": ["{}:{}[rpath-tree]".format(ctx.label.path, ctx.label.name)] if shared_libs_symlink_tree else [], - }, - ), - sub_targets = { - name: [DefaultInfo( - default_output = lib.output, - sub_targets = {"dwp": [DefaultInfo(default_output = lib.dwp)]} if lib.dwp else {}, - )] - for name, lib in shared_libs.items() - }, - )] + + # TODO(agallagher) There appears to be pre-existing soname conflicts + # when building this (when using link groups), which prevents using + # `with_unique_str_sonames`. + str_soname_shlibs = { + shlib.soname.ensure_str(): shlib + for shlib in shared_libs + if shlib.soname.is_str + } + + readable_mappings = {} + soname_to_group_mappings = {} if link_group_mappings: - readable_mappings = {} for node, group in link_group_mappings.items(): + soname = get_shared_library_name(linker_info, group, True) + soname_to_group_mappings[soname] = group readable_mappings[group] = readable_mappings.get(group, []) + ["{}//{}:{}".format(node.cell, node.package, node.name)] sub_targets[LINK_GROUP_MAPPINGS_SUB_TARGET] = [DefaultInfo( @@ -581,6 +628,43 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, ), )] + shared_libraries_sub_targets = {} + for soname, shlib in str_soname_shlibs.items(): + targets = {"dwp": [DefaultInfo(default_output = shlib.lib.dwp)]} if shlib.lib.dwp else {} + + group = soname_to_group_mappings.get(soname) + if group in readable_mappings: + output_json_file = binary.output.basename + "." 
+ group + LINK_GROUP_MAPPINGS_FILENAME_SUFFIX + targets[LINK_GROUP_MAPPINGS_SUB_TARGET] = [DefaultInfo( + default_output = ctx.actions.write_json( + output_json_file, + {group: readable_mappings[group]}, + ), + )] + shared_libraries_sub_targets[soname] = [DefaultInfo( + default_output = shlib.lib.output, + sub_targets = targets, + )] + + sub_targets["shared-libraries"] = [DefaultInfo( + default_output = ctx.actions.write_json( + binary.output.basename + ".shared-libraries.json", + { + "libraries": [ + "{}:{}[shared-libraries][{}]".format(ctx.label.path, ctx.label.name, soname) + for soname in str_soname_shlibs + ], + "librariesdwp": [ + "{}:{}[shared-libraries][{}][dwp]".format(ctx.label.path, ctx.label.name, soname) + for soname, shlib in str_soname_shlibs.items() + if shlib.lib.dwp + ], + "rpathtree": ["{}:{}[rpath-tree]".format(ctx.label.path, ctx.label.name)] if shared_libs_symlink_tree else [], + }, + ), + sub_targets = shared_libraries_sub_targets, + )] + # If we have some resources, write it to the resources JSON file and add # it and all resources to "runtime_files" so that we make to materialize # them with the final binary. @@ -601,8 +685,17 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, runtime_files.extend(resource.other_outputs) if binary.dwp: - # A `dwp` sub-target which generates the `.dwp` file for this binary. - sub_targets["dwp"] = [DefaultInfo(default_output = binary.dwp)] + # A `dwp` sub-target which generates the `.dwp` file for this binary and its shared lib dependencies. + sub_targets["dwp"] = [ + DefaultInfo( + default_output = binary.dwp, + other_outputs = [ + shlib.lib.dwp + for shlib in shared_libs + if shlib.lib.dwp + ], + ), + ] if binary.pdb: # A `pdb` sub-target which generates the `.pdb` file for this binary. @@ -644,7 +737,7 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, actions = ctx.actions, children = ( [binary.external_debug_info] + - [s.external_debug_info for s in shared_libs.values()] + + [s.lib.external_debug_info for s in shared_libs] + impl_params.additional.static_external_debug_info ), ) @@ -657,6 +750,14 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, sub_targets["debuginfo"] = [DefaultInfo( default_output = materialize_external_debug_info, )] + sub_targets["debug_coverage_instrumentation"] = [DefaultInfo( + default_output = materialize_external_debug_info, + )] + + sub_targets["exe"] = [DefaultInfo( + default_output = binary.output, + other_outputs = runtime_files, + )] for additional_subtarget, subtarget_providers in impl_params.additional.subtargets.items(): sub_targets[additional_subtarget] = subtarget_providers @@ -681,6 +782,7 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, nondebug_runtime_files = runtime_files, ), sanitizer_runtime_files = link_result.sanitizer_runtime_files, + index_stores = index_stores, ) _CxxLinkExecutableResult = record( @@ -702,7 +804,7 @@ _CxxLinkExecutableResult = record( def _link_into_executable( ctx: AnalysisContext, - shared_libs: dict[str, LinkedObject], + shared_libs: list[SharedLibrary], executable_name: [str, None], binary_extension: str, opts: LinkOptions) -> _CxxLinkExecutableResult: diff --git a/prelude/cxx/cxx_instrumentation.bzl b/prelude/cxx/cxx_instrumentation.bzl new file mode 100644 index 00000000000..4c3325e1aa2 --- /dev/null +++ b/prelude/cxx/cxx_instrumentation.bzl @@ -0,0 +1,39 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. 
+# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +def needs_instrumentation(children: list[bool], contains_headers_selected_for_coverage_instrumentation: bool) -> bool: + return contains_headers_selected_for_coverage_instrumentation or any(children) + +CxxExportedNeedsCoverageInstrumentationTSet = transitive_set( + reductions = { + "needs_instrumentation": needs_instrumentation, + }, +) + +CxxExportedNeedsCoverageInstrumentation = provider(fields = { + "nodes": CxxExportedNeedsCoverageInstrumentationTSet, +}) + +def build_needs_coverage_tset(ctx: AnalysisContext, deps: list[Dependency]) -> CxxExportedNeedsCoverageInstrumentationTSet: + return ctx.actions.tset( + CxxExportedNeedsCoverageInstrumentationTSet, + value = ctx.attrs.exported_needs_coverage_instrumentation if hasattr(ctx.attrs, "exported_needs_coverage_instrumentation") else False, + children = [d.get(CxxExportedNeedsCoverageInstrumentation).nodes for d in deps if d.get(CxxExportedNeedsCoverageInstrumentation) != None], + ) + +def build_exported_needs_coverage(ctx: AnalysisContext, deps: list[Dependency]) -> CxxExportedNeedsCoverageInstrumentation: + return CxxExportedNeedsCoverageInstrumentation( + nodes = build_needs_coverage_tset(ctx, deps), + ) + +def is_coverage_enabled_by_any_dep(ctx: AnalysisContext, deps: list[Dependency]) -> bool: + tset = build_needs_coverage_tset(ctx, deps) + + return tset.reduce("needs_instrumentation") + +def needs_coverage(cxx_exported_needs_coverage: CxxExportedNeedsCoverageInstrumentation) -> bool: + return cxx_exported_needs_coverage.nodes.reduce("needs_instrumentation") diff --git a/prelude/cxx/cxx_library.bzl b/prelude/cxx/cxx_library.bzl index 15663206c45..bc9846212dd 100644 --- a/prelude/cxx/cxx_library.bzl +++ b/prelude/cxx/cxx_library.bzl @@ -27,16 +27,14 @@ load( "apple_get_link_info_by_deduping_link_infos", ) load("@prelude//apple:resource_groups.bzl", "create_resource_graph") -load( - "@prelude//apple:xcode.bzl", - "get_project_root_file", -) load( "@prelude//apple/swift:swift_runtime.bzl", "create_swift_runtime_linkable", ) +load("@prelude//cxx:headers.bzl", "cxx_attr_exported_headers") load( "@prelude//ide_integrations:xcode.bzl", + "XCODE_ARGSFILES_SUB_TARGET", "XCODE_DATA_SUB_TARGET", "XcodeDataInfo", "generate_xcode_data", @@ -44,6 +42,7 @@ load( load( "@prelude//java:java_providers.bzl", "get_java_packaging_info", + "propagate_global_code_info", ) load("@prelude//linking:execution_preference.bzl", "LinkExecutionPreference", "get_link_execution_preference") load( @@ -64,7 +63,6 @@ load( "LinkInfos", "LinkOrdering", "LinkStrategy", - "Linkage", "LinkedObject", # @unused Used as a type "ObjectsLinkable", "SharedLibLinkable", @@ -95,6 +93,12 @@ load( ) load("@prelude//linking:shared_libraries.bzl", "SharedLibraryInfo", "create_shared_libraries", "merge_shared_libraries") load("@prelude//linking:strip.bzl", "strip_debug_info") +load("@prelude//linking:types.bzl", "Linkage") +load( + "@prelude//third-party:build.bzl", + "create_third_party_build_info", +) +load("@prelude//unix:providers.bzl", "UnixEnv", "create_unix_env_info") load("@prelude//utils:arglike.bzl", "ArgLike") load("@prelude//utils:expect.bzl", "expect") load("@prelude//utils:lazy.bzl", "lazy") @@ -111,7 +115,6 @@ load( load(":archive.bzl", "make_archive") load( ":argsfiles.bzl", - "ABS_ARGSFILES_SUBTARGET", 
"ARGSFILES_SUBTARGET", "get_argsfiles_output", ) @@ -125,11 +128,20 @@ load( load( ":compile.bzl", "CxxCompileCommandOutput", + "CxxCompileFlavor", "CxxCompileOutput", # @unused Used as a type + "CxxSrcCompileCommand", "compile_cxx", "create_compile_cmds", + "cxx_objects_sub_targets", + "precompile_cxx", ) load(":cxx_context.bzl", "get_cxx_platform_info", "get_cxx_toolchain_info") +load( + ":cxx_instrumentation.bzl", + "build_exported_needs_coverage", + "needs_coverage", +) load( ":cxx_library_utility.bzl", "OBJECTS_SUBTARGET", @@ -142,15 +154,24 @@ load( "cxx_attr_resources", "cxx_inherited_link_info", "cxx_is_gnu", - "cxx_objects_sub_targets", "cxx_platform_supported", "cxx_use_shlib_intfs", + "cxx_use_shlib_intfs_mode", +) +load( + ":cxx_toolchain_types.bzl", + "LinkerType", + "ShlibInterfacesMode", + "is_bitcode_format", ) -load(":cxx_toolchain_types.bzl", "is_bitcode_format") load( ":cxx_types.bzl", + "CxxLibraryInfo", "CxxRuleConstructorParams", # @unused Used as a type ) +load(":diagnostics.bzl", "check_sub_target") +load(":gcno.bzl", "GcnoFilesInfo") +load(":index_store.bzl", "create_index_store_subtargets_and_provider") load( ":link.bzl", "CxxLinkResult", # @unused Used as a type @@ -195,6 +216,11 @@ load( ) load( ":shared_library_interface.bzl", + "SharedInterfaceInfo", # @unused Used as a type + "create_shared_interface_info", + "create_shared_interface_info_with_children", + "generate_exported_symbols", + "generate_tbd_with_symbols", "shared_library_interface", ) @@ -224,13 +250,13 @@ CxxLibraryOutput = record( # its corresponding DWARF debug info. # May be None when Split DWARF is disabled, for static/static-pic libraries, # for some types of synthetic link objects or for pre-built shared libraries. - dwp = field([Artifact, None], None), + dwp = field(Artifact | None, None), # A shared shared library may have an associated PDB file with # its corresponding Windows debug info. - pdb = field([Artifact, None], None), + pdb = field(Artifact | None, None), # The import library is the linkable output of a Windows shared library build. - implib = field([Artifact, None], None), + implib = field(Artifact | None, None), # Data about the linker map, only available on shared libraries # TODO(cjhopman): always available? when is it/is it not available? 
linker_map = field([CxxLinkerMapData, None], None), @@ -273,6 +299,7 @@ _CxxLibraryCompileOutput = record( bitcode_objects = field([list[Artifact], None]), # yaml file with optimization remarks about clang compilation clang_remarks = field([list[Artifact], None]), + gcno_files = field([list[Artifact], None]), # json file with trace information about clang compilation clang_traces = field(list[Artifact]), # Externally referenced debug info, which doesn't get linked with the @@ -283,6 +310,10 @@ _CxxLibraryCompileOutput = record( objects_have_external_debug_info = field(bool), # sub_target for each object objects_sub_targets = field(dict[str, list[DefaultInfo]]), + # the generated index stores + index_stores = field(list[Artifact]), + # diagnostics produced by a typecheck-only build (-fsyntax-only) + diagnostics = field(dict[str, Artifact]), ) # The output of compiling all the source files in the library, containing @@ -292,8 +323,12 @@ _CxxCompiledSourcesOutput = record( compile_cmds = field(CxxCompileCommandOutput), # PIC compile outputs pic = field(_CxxLibraryCompileOutput), + # PIC optimized compile outputs + pic_optimized = field([_CxxLibraryCompileOutput, None]), # Non PIC compile outputs non_pic = field([_CxxLibraryCompileOutput, None]), + # Header unit outputs + header_unit_preprocessors = field(list[CPreprocessor]), ) # The outputs of a cxx_library_parameterized rule. @@ -347,8 +382,6 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc # TODO(T110378095) right now we implement reexport of exported_* flags manually, we should improve/automate that in the macro layer - project_root_file = get_project_root_file(ctx) - # Gather preprocessor inputs. (own_non_exported_preprocessor_info, test_preprocessor_infos) = cxx_private_preprocessor_info( ctx = ctx, @@ -356,10 +389,10 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc extra_preprocessors = impl_params.extra_preprocessors, non_exported_deps = non_exported_deps, is_test = impl_params.is_test, - project_root_file = project_root_file, ) - own_exported_preprocessor_info = cxx_exported_preprocessor_info(ctx, impl_params.headers_layout, project_root_file, impl_params.extra_exported_preprocessors) + own_exported_preprocessor_info = cxx_exported_preprocessor_info(ctx, impl_params.headers_layout, impl_params.extra_exported_preprocessors) own_preprocessors = [own_non_exported_preprocessor_info, own_exported_preprocessor_info] + test_preprocessor_infos + own_exported_preprocessors = [own_exported_preprocessor_info] inherited_non_exported_preprocessor_infos = cxx_inherited_preprocessor_infos( non_exported_deps + filter(None, [ctx.attrs.precompiled_header]), @@ -368,18 +401,23 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc preferred_linkage = cxx_attr_preferred_linkage(ctx) + exported_needs_coverage = build_exported_needs_coverage(ctx, exported_deps + non_exported_deps) compiled_srcs = cxx_compile_srcs( ctx = ctx, impl_params = impl_params, own_preprocessors = own_preprocessors, + own_exported_preprocessors = own_exported_preprocessors, inherited_non_exported_preprocessor_infos = inherited_non_exported_preprocessor_infos, inherited_exported_preprocessor_infos = inherited_exported_preprocessor_infos, preferred_linkage = preferred_linkage, + add_coverage_instrumentation_compiler_flags = needs_coverage(exported_needs_coverage), ) sub_targets = {} providers = [] + providers.append(exported_needs_coverage) + if len(ctx.attrs.tests) > 0 and 
impl_params.generate_providers.preprocessor_for_tests: providers.append( CPreprocessorForTestsInfo( @@ -389,8 +427,8 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc ) if impl_params.generate_sub_targets.argsfiles: - sub_targets[ARGSFILES_SUBTARGET] = [get_argsfiles_output(ctx, compiled_srcs.compile_cmds.argsfiles.relative, "argsfiles")] - sub_targets[ABS_ARGSFILES_SUBTARGET] = [get_argsfiles_output(ctx, compiled_srcs.compile_cmds.argsfiles.absolute, "abs-argsfiles")] + sub_targets[ARGSFILES_SUBTARGET] = [get_argsfiles_output(ctx, compiled_srcs.compile_cmds.argsfiles.relative, ARGSFILES_SUBTARGET)] + sub_targets[XCODE_ARGSFILES_SUB_TARGET] = [get_argsfiles_output(ctx, compiled_srcs.compile_cmds.argsfiles.xcode, XCODE_ARGSFILES_SUB_TARGET)] if impl_params.generate_sub_targets.clang_remarks: if compiled_srcs.non_pic and compiled_srcs.non_pic.clang_remarks: @@ -419,6 +457,8 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc if compiled_srcs.non_pic: objects_sub_targets = objects_sub_targets | compiled_srcs.non_pic.objects_sub_targets sub_targets[OBJECTS_SUBTARGET] = [DefaultInfo(sub_targets = objects_sub_targets)] + if len(compiled_srcs.pic.diagnostics) > 0: + sub_targets["check"] = check_sub_target(ctx, compiled_srcs.pic.diagnostics) # Compilation DB. if impl_params.generate_sub_targets.compilation_database: @@ -435,6 +475,22 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc comp_db_info = make_compilation_db_info(compiled_srcs.compile_cmds.comp_db_compile_cmds, get_cxx_toolchain_info(ctx), get_cxx_platform_info(ctx)) providers.append(comp_db_info) + # Shared library interfaces are partial lists of exported symbols that are merged at link time. + exported_symbol_outputs = impl_params.extra_shared_library_interfaces if impl_params.extra_shared_library_interfaces else [] + if cxx_use_shlib_intfs_mode(ctx, ShlibInterfacesMode("stub_from_headers")): + transitive_pp = inherited_exported_preprocessor_infos + if _attr_reexport_all_header_dependencies(ctx): + transitive_pp += inherited_non_exported_preprocessor_infos + + cxx_exported_symbols = generate_exported_symbols( + ctx, + cxx_attr_exported_headers(ctx, impl_params.headers_layout), + own_exported_preprocessor_info, + transitive_pp, + ) + exported_symbol_outputs.append(cxx_exported_symbols) + sub_targets["exported-symbols"] = [DefaultInfo(default_outputs = exported_symbol_outputs)] + # Link Groups link_group = get_link_group(ctx) link_group_info = get_link_group_info(ctx) @@ -464,7 +520,7 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc frameworks_linkable = apple_create_frameworks_linkable(ctx) swiftmodule_linkable = impl_params.swiftmodule_linkable swift_runtime_linkable = create_swift_runtime_linkable(ctx) - dep_infos, link_group_map, link_execution_preference = _get_shared_library_links( + dep_infos, link_group_map, link_execution_preference, shared_interface_info = _get_shared_library_links( ctx, get_linkable_graph_node_map_func(deps_linkable_graph), link_group, @@ -478,6 +534,7 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc swiftmodule_linkable, force_static_follows_dependents = impl_params.link_groups_force_static_follows_dependents, swift_runtime_linkable = swift_runtime_linkable, + exported_symbol_outputs = exported_symbol_outputs, ) if impl_params.generate_sub_targets.link_group_map and link_group_map: sub_targets[LINK_GROUP_MAP_DATABASE_SUB_TARGET] = 
[link_group_map] @@ -499,8 +556,10 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc extra_static_linkables = extra_static_linkables, gnu_use_link_groups = cxx_is_gnu(ctx) and bool(link_group_mappings), link_execution_preference = link_execution_preference, + shared_interface_info = shared_interface_info, ) solib_as_dict = {library_outputs.solib[0]: library_outputs.solib[1]} if library_outputs.solib else {} + shared_libs = create_shared_libraries(ctx, solib_as_dict) for _, link_style_output in library_outputs.outputs.items(): for key in link_style_output.sub_targets.keys(): @@ -533,6 +592,11 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc # Add any subtargets for this output style. output_style_sub_targets.update(output.sub_targets) + # TBD outputs are collected for each link unit, so propagate whenever + # a library is being linked statically. + if output_style != LibOutputStyle("shared_lib") and shared_interface_info != None: + output_style_providers.append(shared_interface_info) + if impl_params.generate_sub_targets.link_style_outputs: if output: sub_targets[subtarget_for_output_style(output_style)] = [DefaultInfo( @@ -571,7 +635,7 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc output = default_output.default if default_output else None, populate_rule_specific_attributes_func = impl_params.cxx_populate_xcode_attributes_func, srcs = impl_params.srcs + impl_params.additional.srcs, - argsfiles = compiled_srcs.compile_cmds.argsfiles.absolute, + argsfiles = compiled_srcs.compile_cmds.argsfiles.xcode, product_name = get_default_cxx_library_product_name(ctx, impl_params), ) sub_targets[XCODE_DATA_SUB_TARGET] = xcode_data_default_info @@ -608,15 +672,71 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc if impl_params.generate_providers.shared_libraries: providers.append(merge_shared_libraries( ctx.actions, - create_shared_libraries(ctx, solib_as_dict), + shared_libs, filter(None, [x.get(SharedLibraryInfo) for x in non_exported_deps]) + filter(None, [x.get(SharedLibraryInfo) for x in exported_deps]), )) + providers.append( + create_unix_env_info( + actions = ctx.actions, + env = UnixEnv( + label = ctx.label, + native_libs = [shared_libs], + ), + deps = exported_deps + non_exported_deps, + ), + ) propagated_preprocessor_merge_list = inherited_exported_preprocessor_infos if _attr_reexport_all_header_dependencies(ctx): propagated_preprocessor_merge_list = inherited_non_exported_preprocessor_infos + propagated_preprocessor_merge_list - propagated_preprocessor = cxx_merge_cpreprocessors(ctx, [own_exported_preprocessor_info], propagated_preprocessor_merge_list) + + # Header unit PCM. 
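The block that follows materializes each header-unit PCM and exposes the units both in aggregate and individually. Sub-target keys must be strings, hence the `str(i)`/`enumerate` pattern below; a consumer could then, for example, address a single PCM as `cell//pkg:lib[header-unit][0]`. A minimal standalone sketch of the same pattern (the helper name is illustrative, not part of this diff):

    # Illustrative only: expose a list of artifacts under numbered
    # sub-targets ("0", "1", ...), the way the header-unit block does.
    def _numbered_sub_targets(artifacts: list[Artifact]) -> dict[str, list[Provider]]:
        return {
            str(i): [DefaultInfo(default_output = a)]
            for i, a in enumerate(artifacts)
        }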
+ if impl_params.generate_sub_targets.header_unit: + if compiled_srcs.header_unit_preprocessors: + header_unit_preprocessors = [] + header_unit_sub_targets = [] + for x in compiled_srcs.header_unit_preprocessors: + header_unit_preprocessors.append(x) + header_unit_sub_targets.append([ + DefaultInfo(default_outputs = [h.module for h in x.header_units]), + cxx_merge_cpreprocessors( + ctx, + own_exported_preprocessors + header_unit_preprocessors, + propagated_preprocessor_merge_list, + ), + ]) + sub_targets["header-unit"] = [ + DefaultInfo( + default_outputs = [ + h.module + for x in header_unit_preprocessors + for h in x.header_units + ], + sub_targets = { + str(i): x + for i, x in enumerate(header_unit_sub_targets) + }, + ), + header_unit_sub_targets[-1][1], + ] + if impl_params.export_header_unit: + own_exported_preprocessors.extend(header_unit_preprocessors) + else: + sub_targets["header-unit"] = [ + DefaultInfo(), + cxx_merge_cpreprocessors( + ctx, + own_exported_preprocessors, + propagated_preprocessor_merge_list, + ), + ] + + propagated_preprocessor = cxx_merge_cpreprocessors( + ctx, + own_exported_preprocessors, + propagated_preprocessor_merge_list, + ) if impl_params.generate_providers.preprocessors: providers.append(propagated_preprocessor) @@ -624,6 +744,23 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc propagated_exported_preprocessor_info = propagated_preprocessor if impl_params.rule_type == "apple_library" and ctx.attrs.modular else None additional_providers = impl_params.additional.additional_providers_factory(propagated_exported_preprocessor_info) if impl_params.additional.additional_providers_factory else [] + if impl_params.generate_providers.third_party_build: + third_party_build_info = create_third_party_build_info( + ctx = ctx, + cxx_headers = [propagated_preprocessor], + shared_libs = shared_libs.libraries, + deps = exported_deps + non_exported_deps, + ) + providers.append(third_party_build_info) + sub_targets["third-party-build"] = [ + DefaultInfo( + default_output = third_party_build_info.build.root.artifact, + sub_targets = dict( + manifest = [DefaultInfo(default_output = third_party_build_info.build.manifest)], + ), + ), + ] + # For v1's `#headers` functionality. if impl_params.generate_sub_targets.headers: sub_targets["headers"] = [propagated_preprocessor, create_merged_link_info( @@ -651,6 +788,16 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc for additional_subtarget, subtarget_providers in impl_params.additional.subtargets.items(): sub_targets[additional_subtarget] = subtarget_providers + # Index store from swift compile + index_stores = impl_params.index_stores if impl_params.index_stores else [] + + # Index stores from cxx compile. We only generate the index store for pic + if compiled_srcs.pic: + index_stores.extend(compiled_srcs.pic.index_stores) + index_store_subtargets, index_store_info = create_index_store_subtargets_and_provider(ctx, index_stores, non_exported_deps + exported_deps) + sub_targets.update(index_store_subtargets) + providers.append(index_store_info) + linker_flags = cxx_attr_linker_flags_all(ctx) # Omnibus root provider. 
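The index-store wiring just above follows a recurring prelude shape: gather this target's per-source artifacts, then publish them twice, once as sub-targets for direct building and once as a provider so dependents can aggregate transitively. A condensed sketch of that shape, using hypothetical `DemoIndexInfo` names that are not part of this diff:

    # Hypothetical names; the real helpers live in index_store.bzl.
    DemoIndexInfo = provider(fields = {"stores": list[Artifact]})

    def _demo_collect_index_stores(
            own_stores: list[Artifact],
            deps: list[Dependency]) -> (dict[str, list[Provider]], DemoIndexInfo):
        # Merge this target's index stores with those its deps already collected.
        all_stores = list(own_stores)
        for dep in deps:
            info = dep.get(DemoIndexInfo)
            if info != None:
                all_stores.extend(info.stores)

        # Publish once for `buck2 build` access and once for dependents.
        sub_targets = {"index-stores": [DefaultInfo(default_outputs = all_stores)]}
        return sub_targets, DemoIndexInfo(stores = all_stores)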
@@ -662,6 +809,7 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc
             soname = None
         linker_type = get_cxx_toolchain_info(ctx).linker_info.type
         linkable_root = create_linkable_root(
+            label = ctx.label,
             name = soname,
             link_infos = LinkInfos(
                 default = LinkInfo(
@@ -717,9 +865,11 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc
                 # that omnibus knows to avoid it.
                 include_in_android_mergemap = getattr(ctx.attrs, "include_in_android_merge_map_output", True) and default_output != None,
                 link_infos = library_outputs.link_infos,
-                shared_libs = solib_as_dict,
+                shared_libs = shared_libs,
                 linker_flags = linker_flags,
                 can_be_asset = getattr(ctx.attrs, "can_be_asset", False) or False,
+                # We don't want to propagate shared interfaces across shared library boundaries.
+                shared_interface_info = None if preferred_linkage == Linkage("shared") else create_shared_interface_info(ctx, exported_symbol_outputs, []),
             ),
             excluded = {ctx.label: None} if not value_or(ctx.attrs.supports_merged_linking, True) else {},
         ),
@@ -752,26 +902,28 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc
         # Some rules, e.g. fbcode//thrift/lib/cpp:thrift-core-module
         # define preprocessor flags as things like: -DTHRIFT_PLATFORM_CONFIG=
         # and unless they get quoted, they break shell syntax.
-        cxx_preprocessor_flags = cmd_args()
         cxx_compiler_info = get_cxx_toolchain_info(ctx).cxx_compiler_info
-        cxx_preprocessor_flags.add(cmd_args(cxx_compiler_info.preprocessor_flags or [], quote = "shell"))
-        cxx_preprocessor_flags.add(cmd_args(propagated_preprocessor.set.project_as_args("args"), quote = "shell"))
-        cxx_preprocessor_flags.add(propagated_preprocessor.set.project_as_args("include_dirs"))
+        cxx_preprocessor_flags = cmd_args(
+            cmd_args(cxx_compiler_info.preprocessor_flags or [], quote = "shell"),
+            cmd_args(propagated_preprocessor.set.project_as_args("args"), quote = "shell"),
+            propagated_preprocessor.set.project_as_args("include_dirs"),
+        )
         templ_vars["cxxppflags"] = cxx_preprocessor_flags

-        c_preprocessor_flags = cmd_args()
         c_compiler_info = get_cxx_toolchain_info(ctx).c_compiler_info
-        c_preprocessor_flags.add(cmd_args(c_compiler_info.preprocessor_flags or [], quote = "shell"))
-        c_preprocessor_flags.add(cmd_args(propagated_preprocessor.set.project_as_args("args"), quote = "shell"))
-        c_preprocessor_flags.add(propagated_preprocessor.set.project_as_args("include_dirs"))
+        c_preprocessor_flags = cmd_args(
+            cmd_args(c_compiler_info.preprocessor_flags or [], quote = "shell"),
+            cmd_args(propagated_preprocessor.set.project_as_args("args"), quote = "shell"),
+            propagated_preprocessor.set.project_as_args("include_dirs"),
+        )
         templ_vars["cppflags"] = c_preprocessor_flags

         # Add in ldflag macros.
         for link_strategy in (LinkStrategy("static"), LinkStrategy("static_pic")):
             name = "ldflags-" + link_strategy.value.replace("_", "-")
-            args = cmd_args()
+            args = []
             linker_info = get_cxx_toolchain_info(ctx).linker_info
-            args.add(linker_info.linker_flags or [])
+            args.append(linker_info.linker_flags or [])

             # Normally, we call get_link_args_for_strategy for getting the args for our own link from our
             # deps.
This case is a bit different as we are effectively trying to get the args for how this library
@@ -781,8 +933,8 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc
                 [merged_native_link_info],
                 link_strategy,
             )
-            args.add(unpack_link_args(link_args))
-            templ_vars[name] = args
+            args.append(unpack_link_args(link_args))
+            templ_vars[name] = cmd_args(args)

         # TODO(T110378127): To implement `$(ldflags-shared ...)` properly, we'd need
         # to set up a symlink tree rule for all transitive shared libs. Since this
@@ -801,6 +953,9 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc
     if impl_params.generate_providers.java_packaging_info:
         providers.append(get_java_packaging_info(ctx, non_exported_deps + exported_deps))

+    if impl_params.generate_providers.java_global_code_info:
+        providers.append(propagate_global_code_info(ctx, ctx.attrs.deps + ctx.attrs.exported_deps))
+
     # TODO(T107163344) this shouldn't be in cxx_library itself, use overlays to remove it.
     if impl_params.generate_providers.android_packageable_info:
         providers.append(merge_android_packageable_info(ctx.label, ctx.actions, non_exported_deps + exported_deps))
@@ -817,6 +972,7 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc
             pass

     default_output = unknown()
+
     default_info = DefaultInfo(
         default_output = default_output.default if default_output != None else None,
         other_outputs = default_output.other if default_output != None else [],
@@ -831,12 +987,20 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc
             merge_link_group_lib_info(
                 label = ctx.label,
                 name = link_group,
-                shared_libs = solib_as_dict,
+                shared_libs = shared_libs,
                 shared_link_infos = library_outputs.link_infos.get(LibOutputStyle("shared_lib")),
                 deps = exported_deps + non_exported_deps,
             ),
         )

+    if getattr(ctx.attrs, "_meta_apple_library_validation_enabled", False):
+        providers.append(
+            CxxLibraryInfo(
+                target = ctx.label,
+                labels = ctx.attrs.labels,
+            ),
+        )
+
     return _CxxLibraryParameterizedOutput(
         default_output = default_output,
         all_outputs = library_outputs,
@@ -859,9 +1023,13 @@ def get_default_cxx_library_product_name(ctx, impl_params) -> str:
         if output_style == LibOutputStyle("shared_lib"):
             return _soname(ctx, impl_params)
         else:
-            return _base_static_library_name(ctx, False)
+            return _base_static_library_name(ctx, optimized = False, stripped = False)

-def _get_library_compile_output(ctx, outs: list[CxxCompileOutput], extra_link_input) -> _CxxLibraryCompileOutput:
+def _get_library_compile_output(
+        ctx: AnalysisContext,
+        src_compile_cmds: list[CxxSrcCompileCommand],
+        outs: list[CxxCompileOutput],
+        extra_link_input: list[Artifact]) -> _CxxLibraryCompileOutput:
     objects = [out.object for out in outs]
     stripped_objects = _strip_objects(ctx, objects)

@@ -879,15 +1047,30 @@ def _get_library_compile_output(ctx, outs: list[CxxCompileOutput], extra_link_in
     objects += extra_link_input
     stripped_objects += extra_link_input

+    index_stores = [
+        out.index_store
+        for out in outs
+        if out.index_store
+    ]
+
+    diagnostics = {
+        compile_cmd.src.short_path: out.diagnostics
+        for compile_cmd, out in zip(src_compile_cmds, outs)
+        if out.diagnostics != None
+    }
+
     return _CxxLibraryCompileOutput(
         objects = objects,
         stripped_objects = stripped_objects,
         bitcode_objects = bitcode_objects,
         clang_traces = [out.clang_trace for out in outs if out.clang_trace != None],
         clang_remarks = [out.clang_remarks for out in outs if out.clang_remarks != None],
+        gcno_files = [out.gcno_file
for out in outs if out.gcno_file != None], external_debug_info = [out.external_debug_info for out in outs if out.external_debug_info != None], objects_have_external_debug_info = lazy.is_any(lambda out: out.object_has_external_debug_info, outs), objects_sub_targets = objects_sub_targets, + index_stores = index_stores, + diagnostics = diagnostics, ) def cxx_compile_srcs( @@ -896,32 +1079,90 @@ def cxx_compile_srcs( own_preprocessors: list[CPreprocessor], inherited_non_exported_preprocessor_infos: list[CPreprocessorInfo], inherited_exported_preprocessor_infos: list[CPreprocessorInfo], - preferred_linkage: Linkage) -> _CxxCompiledSourcesOutput: + preferred_linkage: Linkage, + add_coverage_instrumentation_compiler_flags: bool, + own_exported_preprocessors: list[CPreprocessor] = []) -> _CxxCompiledSourcesOutput: """ Compile objects we'll need for archives and shared libraries. """ # Create the commands and argsfiles to use for compiling each source file + if own_exported_preprocessors: + header_preprocessor_info = cxx_merge_cpreprocessors( + ctx, + own_exported_preprocessors, + inherited_exported_preprocessor_infos, + ) + else: + header_preprocessor_info = CPreprocessorInfo() compile_cmd_output = create_compile_cmds( ctx = ctx, impl_params = impl_params, own_preprocessors = own_preprocessors, inherited_preprocessor_infos = inherited_non_exported_preprocessor_infos + inherited_exported_preprocessor_infos, + header_preprocessor_info = header_preprocessor_info, + add_coverage_instrumentation_compiler_flags = add_coverage_instrumentation_compiler_flags, ) + # Define header unit. + header_unit_preprocessors = precompile_cxx(ctx, impl_params, own_exported_preprocessors, compile_cmd_output) + # Define object files. - pic_cxx_outs = compile_cxx(ctx, compile_cmd_output.src_compile_cmds, pic = True) - pic = _get_library_compile_output(ctx, pic_cxx_outs, impl_params.extra_link_input) + pic_cxx_outs = compile_cxx( + ctx = ctx, + src_compile_cmds = compile_cmd_output.src_compile_cmds, + flavor = CxxCompileFlavor("pic"), + provide_syntax_only = True, + use_header_units = impl_params.use_header_units, + ) + pic = _get_library_compile_output( + ctx = ctx, + src_compile_cmds = compile_cmd_output.src_compile_cmds, + outs = pic_cxx_outs, + extra_link_input = impl_params.extra_link_input, + ) non_pic = None + pic_optimized = None if preferred_linkage != Linkage("shared"): - non_pic_cxx_outs = compile_cxx(ctx, compile_cmd_output.src_compile_cmds, pic = False) - non_pic = _get_library_compile_output(ctx, non_pic_cxx_outs, impl_params.extra_link_input) + non_pic_cxx_outs = compile_cxx( + ctx = ctx, + src_compile_cmds = compile_cmd_output.src_compile_cmds, + flavor = CxxCompileFlavor("default"), + # Diagnostics from the pic and non-pic compilation would be + # identical. We can avoid instantiating a second set of actions. + provide_syntax_only = False, + use_header_units = impl_params.use_header_units, + ) + non_pic = _get_library_compile_output( + ctx = ctx, + src_compile_cmds = compile_cmd_output.src_compile_cmds, + outs = non_pic_cxx_outs, + extra_link_input = impl_params.extra_link_input, + ) + + if get_cxx_toolchain_info(ctx).optimization_compiler_flags_EXPERIMENTAL: + optimized_cxx_outs = compile_cxx( + ctx = ctx, + src_compile_cmds = compile_cmd_output.src_compile_cmds, + flavor = CxxCompileFlavor("pic_optimized"), + # Diagnostics from the pic and non-pic compilation would be + # identical. We can avoid instantiating a second set of actions. 
+ provide_syntax_only = False, + ) + pic_optimized = _get_library_compile_output( + ctx = ctx, + src_compile_cmds = compile_cmd_output.src_compile_cmds, + outs = optimized_cxx_outs, + extra_link_input = impl_params.extra_link_input, + ) return _CxxCompiledSourcesOutput( compile_cmds = compile_cmd_output, pic = pic, + pic_optimized = pic_optimized, non_pic = non_pic, + header_unit_preprocessors = header_unit_preprocessors, ) def _form_library_outputs( @@ -932,13 +1173,15 @@ def _form_library_outputs( dep_infos: LinkArgs, extra_static_linkables: list[[FrameworksLinkable, SwiftmoduleLinkable, SwiftRuntimeLinkable]], gnu_use_link_groups: bool, - link_execution_preference: LinkExecutionPreference) -> _CxxAllLibraryOutputs: + link_execution_preference: LinkExecutionPreference, + shared_interface_info: [SharedInterfaceInfo, None]) -> _CxxAllLibraryOutputs: # Build static/shared libs and the link info we use to export them to dependents. outputs = {} solib = None link_infos = {} providers = [] sanitizer_runtime_files = [] + gcno_files = [] linker_flags = cxx_attr_linker_flags_all(ctx) @@ -953,6 +1196,7 @@ def _form_library_outputs( # We don't know which outputs consumers may want, so we define all the possibilities given our preferred linkage. for output_style in get_output_styles_for_linkage(preferred_linkage): output = None + optimized_info = None stripped = None info = None @@ -966,6 +1210,27 @@ def _form_library_outputs( if not lib_compile_output: fail("output_style {} requires non_pic compiled srcs, but didn't have any in {}".format(output_style, compiled_srcs)) + gcno_files += lib_compile_output.gcno_files + + if pic and compiled_srcs.pic_optimized and compiled_srcs.pic_optimized.objects: + _, optimized_info = _static_library( + ctx, + impl_params, + compiled_srcs.pic_optimized.objects, + objects_have_external_debug_info = compiled_srcs.pic_optimized.objects_have_external_debug_info, + external_debug_info = make_artifact_tset( + ctx.actions, + label = ctx.label, + artifacts = compiled_srcs.pic_optimized.external_debug_info, + children = impl_params.additional.static_external_debug_info, + ), + pic = pic, + optimized = True, + stripped = False, + extra_linkables = extra_static_linkables, + bitcode_objects = compiled_srcs.pic_optimized.bitcode_objects, + ) + # Only generate an archive if we have objects to include if lib_compile_output.objects: output, info = _static_library( @@ -981,6 +1246,7 @@ def _form_library_outputs( ), pic = pic, stripped = False, + optimized = False, extra_linkables = extra_static_linkables, bitcode_objects = lib_compile_output.bitcode_objects, ) @@ -990,6 +1256,7 @@ def _form_library_outputs( lib_compile_output.stripped_objects, pic = pic, stripped = True, + optimized = False, extra_linkables = extra_static_linkables, bitcode_objects = lib_compile_output.bitcode_objects, ) @@ -1014,19 +1281,24 @@ def _form_library_outputs( label = ctx.label, artifacts = external_debug_artifacts, children = impl_params.additional.shared_external_debug_info, + tags = impl_params.additional.external_debug_info_tags, ) + gcno_files += compiled_srcs.pic.gcno_files + extra_linker_flags, extra_linker_outputs = impl_params.extra_linker_outputs_factory(ctx) + result = _shared_library( - ctx, - impl_params, - compiled_srcs.pic.objects, - external_debug_info, - dep_infos, - gnu_use_link_groups, + ctx = ctx, + impl_params = impl_params, + objects = compiled_srcs.pic.objects, + external_debug_info = external_debug_info, + dep_infos = dep_infos, + gnu_use_link_groups = gnu_use_link_groups, 
extra_linker_flags = extra_linker_flags, link_ordering = map_val(LinkOrdering, ctx.attrs.link_ordering), link_execution_preference = link_execution_preference, + shared_interface_info = shared_interface_info, ) shlib = result.link_result.linked_object info = result.info @@ -1080,8 +1352,19 @@ def _form_library_outputs( link_infos[output_style] = LinkInfos( default = ldflags(info), stripped = ldflags(stripped) if stripped != None else None, + optimized = ldflags(optimized_info) if optimized_info != None else None, ) + if get_cxx_toolchain_info(ctx).gcno_files: + deps_gcno_files = [ + x[GcnoFilesInfo].gcno_files + for x in ctx.attrs.deps + ctx.attrs.exported_deps + if GcnoFilesInfo in x + ] + providers.append(GcnoFilesInfo( + gcno_files = dedupe(flatten(deps_gcno_files) + gcno_files), + )) + return _CxxAllLibraryOutputs( outputs = outputs, link_infos = link_infos, @@ -1099,7 +1382,7 @@ def _strip_objects(ctx: AnalysisContext, objects: list[Artifact]) -> list[Artifa # Stripping is not supported on Windows linker_type = cxx_toolchain_info.linker_info.type - if linker_type == "windows": + if linker_type == LinkerType("windows"): return objects # Disable stripping if no `strip` binary was provided by the toolchain. @@ -1128,8 +1411,9 @@ def _get_shared_library_links( force_link_group_linking, frameworks_linkable: [FrameworksLinkable, None], swiftmodule_linkable: [SwiftmoduleLinkable, None], + exported_symbol_outputs: list[Artifact], force_static_follows_dependents: bool = True, - swift_runtime_linkable: [SwiftRuntimeLinkable, None] = None) -> (LinkArgs, [DefaultInfo, None], LinkExecutionPreference): + swift_runtime_linkable: [SwiftRuntimeLinkable, None] = None) -> (LinkArgs, [DefaultInfo, None], LinkExecutionPreference, [SharedInterfaceInfo, None]): """ Returns LinkArgs with the content to link, and a link group map json output if applicable. @@ -1144,7 +1428,8 @@ def _get_shared_library_links( # If we're not filtering for link groups, link against the shared dependencies if not link_group_mappings and not force_link_group_linking: - deps_merged_link_infos = cxx_inherited_link_info(dedupe(flatten([non_exported_deps, exported_deps]))) + deps = dedupe(flatten([non_exported_deps, exported_deps])) + deps_merged_link_infos = cxx_inherited_link_info(deps) link_strategy = cxx_attr_link_strategy(ctx.attrs) @@ -1154,6 +1439,10 @@ def _get_shared_library_links( # Not all rules calling `cxx_library_parameterized` have `link_execution_preference`. Notably `cxx_python_extension`. link_execution_preference = get_link_execution_preference(ctx, []) if hasattr(ctx.attrs, "link_execution_preference") else LinkExecutionPreference("any") + # Collect the shared interface providers for this link unit and strategy. + # These are merged when linking shared library output. 
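`SharedInterfaceInfo` is defined in shared_library_interface.bzl, which this diff only loads, so its exact shape is not shown here. Purely as an illustration of how such a mergeable, transitive provider is typically assembled in the prelude, a hypothetical reduced version (none of these demo names come from the actual file):

    # Hypothetical shape only; the real SharedInterfaceInfo lives in
    # shared_library_interface.bzl and is not shown in this diff.
    DemoInterfaceTSet = transitive_set()

    DemoInterfaceInfo = provider(fields = {"interfaces": DemoInterfaceTSet})

    def _demo_create_interface_info(
            ctx: AnalysisContext,
            outputs: list[Artifact],
            deps: list[Dependency]) -> [DemoInterfaceInfo, None]:
        children = []
        for dep in deps:
            info = dep.get(DemoInterfaceInfo)
            if info != None:
                children.append(info.interfaces)

        # No symbol outputs and no children: interface generation is disabled.
        if len(outputs) == 0 and len(children) == 0:
            return None
        return DemoInterfaceInfo(interfaces = ctx.actions.tset(
            DemoInterfaceTSet,
            value = outputs,
            children = children,
        ))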
+    shared_interface_info = create_shared_interface_info(ctx, exported_symbol_outputs, deps)
+
     return apple_build_link_args_with_deduped_flags(
         ctx,
         deps_merged_link_infos,
@@ -1165,7 +1454,7 @@ def _get_shared_library_links(
             process_link_strategy_for_pic_behavior(link_strategy, pic_behavior),
             swiftmodule_linkable,
             swift_runtime_linkable = swift_runtime_linkable,
-        ), None, link_execution_preference
+        ), None, link_execution_preference, shared_interface_info

     # Else get filtered link group links
     prefer_stripped = cxx_is_gnu(ctx) and ctx.attrs.prefer_stripped_objects
@@ -1175,8 +1464,11 @@
     if link_strategy == LinkStrategy("static"):
         link_strategy = LinkStrategy("static_pic")
     link_strategy = process_link_strategy_for_pic_behavior(link_strategy, pic_behavior)
-    filtered_labels_to_links_map = get_filtered_labels_to_links_map(
-        linkable_graph_node_map_func(),
+    linkable_graph_label_to_node_map = linkable_graph_node_map_func()
+
+    filtered_labels_to_links = get_filtered_labels_to_links_map(
+        None,
+        linkable_graph_label_to_node_map,
         link_group,
         {},
         link_group_mappings,
@@ -1191,10 +1483,10 @@
         prefer_stripped = prefer_stripped,
         force_static_follows_dependents = force_static_follows_dependents,
     )
-    filtered_links = get_filtered_links(filtered_labels_to_links_map)
-    filtered_targets = get_filtered_targets(filtered_labels_to_links_map)
+    filtered_links = get_filtered_links(filtered_labels_to_links.map)
+    filtered_targets = get_filtered_targets(filtered_labels_to_links.map)

-    link_execution_preference = get_link_execution_preference(ctx, filtered_labels_to_links_map.keys())
+    link_execution_preference = get_link_execution_preference(ctx, filtered_labels_to_links.map.keys())

     # Unfortunately, link_groups does not use MergedLinkInfo to represent the args
     # for the resolved nodes in the graph.
@@ -1202,7 +1494,18 @@
     if additional_links:
         filtered_links.append(additional_links)

-    return LinkArgs(infos = filtered_links), get_link_group_map_json(ctx, filtered_targets), link_execution_preference
+    # Collect the interface providers from the targets in this link group; these will
+    # be merged when linking shared library output. If this library has no
+    # interface output, then interface generation is disabled and we can skip collection.
+ shared_interface_infos = [] + if len(exported_symbol_outputs) > 0: + for label in filtered_labels_to_links.map.keys(): + linkable_node = linkable_graph_label_to_node_map[label] + if linkable_node.shared_interface_info != None: + shared_interface_infos.append(linkable_node.shared_interface_info) + + shared_interface_info = create_shared_interface_info_with_children(ctx, exported_symbol_outputs, shared_interface_infos) + return LinkArgs(infos = filtered_links), get_link_group_map_json(ctx, filtered_targets), link_execution_preference, shared_interface_info def _use_pic(output_style: LibOutputStyle) -> bool: """ @@ -1218,6 +1521,7 @@ def _static_library( impl_params: CxxRuleConstructorParams, objects: list[Artifact], pic: bool, + optimized: bool, stripped: bool, extra_linkables: list[[FrameworksLinkable, SwiftmoduleLinkable, SwiftRuntimeLinkable]], objects_have_external_debug_info: bool = False, @@ -1235,18 +1539,14 @@ def _static_library( linker_info = get_cxx_toolchain_info(ctx).linker_info linker_type = linker_info.type - base_name = _base_static_library_name(ctx, stripped) + base_name = _base_static_library_name(ctx, optimized, stripped) name = _archive_name(base_name, pic = pic, extension = linker_info.static_library_extension) # If we have extra hidden deps of this target add them to the archive action # so they are forced to build for static library output. - archive_args = cmd_args(objects) - if impl_params.extra_hidden: - archive_args.hidden(impl_params.extra_hidden) - - archive = make_archive(ctx, name, objects, archive_args) + archive = make_archive(ctx, name, objects, impl_params.extra_hidden) - bitcode_bundle = _bitcode_bundle(ctx, bitcode_objects, pic, stripped) + bitcode_bundle = _bitcode_bundle(ctx, bitcode_objects, optimized, pic, stripped) if False: # TODO(nga): bitcode_bundle.artifact def unknown(): @@ -1283,7 +1583,7 @@ def _static_library( # On darwin, the linked output references the archive that contains the # object files instead of the originating objects. object_external_debug_info = [] - if linker_type == "darwin": + if linker_type == LinkerType("darwin"): object_external_debug_info.append(archive.artifact) object_external_debug_info.extend(archive.external_objects) elif objects_have_external_debug_info: @@ -1294,6 +1594,7 @@ def _static_library( label = ctx.label, artifacts = object_external_debug_info, children = [external_debug_info], + tags = impl_params.additional.external_debug_info_tags, ) return ( @@ -1325,13 +1626,14 @@ def _static_library( def _bitcode_bundle( ctx: AnalysisContext, objects: [list[Artifact], None], - pic: bool = False, - stripped: bool = False, + optimized: bool, + pic: bool, + stripped: bool, name_extra = "") -> [BitcodeBundle, None]: if objects == None or len(objects) == 0: return None - base_name = _base_static_library_name(ctx, False) + base_name = _base_static_library_name(ctx, optimized, stripped = False) name = name_extra + _bitcode_bundle_name(base_name, pic, stripped) return make_bitcode_bundle(ctx, name, objects) @@ -1340,7 +1642,7 @@ _CxxSharedLibraryResult = record( link_result = CxxLinkResult, # Shared library name (e.g. SONAME) soname = str, - objects_bitcode_bundle = [Artifact, None], + objects_bitcode_bundle = Artifact | None, # `LinkInfo` used to link against the shared library. 
    info = LinkInfo,
)

@@ -1354,7 +1656,8 @@ def _shared_library(
        gnu_use_link_groups: bool,
        extra_linker_flags: list[ArgLike],
        link_execution_preference: LinkExecutionPreference,
-        link_ordering: [LinkOrdering, None] = None) -> _CxxSharedLibraryResult:
+        link_ordering: [LinkOrdering, None],
+        shared_interface_info: [SharedInterfaceInfo, None]) -> _CxxSharedLibraryResult:
     """
     Generate a shared library and the associated native link info used by
     dependents to link against it.
     """
@@ -1364,7 +1667,7 @@ def _shared_library(
     cxx_toolchain = get_cxx_toolchain_info(ctx)
     linker_info = cxx_toolchain.linker_info

-    local_bitcode_bundle = _bitcode_bundle(ctx, objects, name_extra = "objects-")
+    local_bitcode_bundle = _bitcode_bundle(ctx, objects, optimized = False, pic = False, stripped = False, name_extra = "objects-")

     # NOTE(agallagher): We add exported link flags here because it's what v1
     # does, but the intent of exported link flags is to wrap the link output
@@ -1372,6 +1675,7 @@ def _shared_library(
     # generating a link product.
     linker_flags = cxx_attr_linker_flags_all(ctx)
     link_info = LinkInfo(
+        dist_thin_lto_codegen_flags = getattr(ctx.attrs, "dist_thin_lto_codegen_flags", []),
         pre_flags = (
             linker_flags.flags +
             linker_flags.exported_flags +
@@ -1399,19 +1703,21 @@ def _shared_library(
     links = [LinkArgs(infos = [link_info]), dep_infos]
     if impl_params.extra_hidden:
         links.append(
-            LinkArgs(flags = cmd_args().hidden(impl_params.extra_hidden)),
+            LinkArgs(flags = cmd_args(hidden = impl_params.extra_hidden)),
         )

     link_result = cxx_link_shared_library(
         ctx = ctx,
         output = soname,
         opts = link_options(
+            enable_distributed_thinlto = getattr(ctx.attrs, "enable_distributed_thinlto", False),
             links = links,
             identifier = soname,
             link_ordering = link_ordering,
             strip = impl_params.strip_executable,
             strip_args_factory = impl_params.strip_args_factory,
             link_execution_preference = link_execution_preference,
+            error_handler = impl_params.error_handler,
         ),
         name = soname if impl_params.use_soname else None,
         shared_library_flags = impl_params.shared_library_flags,
@@ -1421,8 +1727,26 @@ def _shared_library(
     # If shared library interfaces are enabled, link that and use it as
     # the shared lib that dependents will link against.
     if cxx_use_shlib_intfs(ctx):
-        if not linker_info.produce_interface_from_stub_shared_library:
-            shlib_for_interface = exported_shlib
+        mode = get_cxx_toolchain_info(ctx).linker_info.shlib_interfaces
+        if mode == ShlibInterfacesMode("stub_from_library"):
+            # Generate a library interface from the linked library output.
+            # This will prevent relinking rdeps when changes do not affect
+            # the library symbols.
+            exported_shlib = shared_library_interface(
+                ctx = ctx,
+                shared_lib = exported_shlib,
+            )
+        elif mode == ShlibInterfacesMode("stub_from_headers"):
+            # Generate a library interface from its deps' exported_headers.
+            # This will allow for linker parallelisation as we do not have
+            # to wait for dependent libraries to link.
+            # If the provider is missing, this is not an apple_library target,
+            # so skip producing the interface.
+            if shared_interface_info != None:
+                # Collect the linker args which are required
+                # to correctly set symbol visibility.
+ link_args = [unpack_link_args(link) for link in links] + exported_shlib = generate_tbd_with_symbols(ctx, soname, shared_interface_info.interfaces, link_args) elif not gnu_use_link_groups: # TODO(agallagher): There's a bug in shlib intfs interacting with link # groups, where we don't include the symbols we're meant to export from @@ -1451,22 +1775,15 @@ def _shared_library( identifier = soname + "-interface", link_execution_preference = link_execution_preference, strip = impl_params.strip_executable, + error_handler = impl_params.error_handler, ), name = soname, ) - shlib_for_interface = intf_link_result.linked_object.output - else: - shlib_for_interface = None - - if shlib_for_interface: - # Convert the shared library into an interface. - shlib_interface = shared_library_interface( + exported_shlib = shared_library_interface( ctx = ctx, - shared_lib = shlib_for_interface, + shared_lib = intf_link_result.linked_object.output, ) - exported_shlib = shlib_interface - # Link against import library on Windows. if link_result.linked_object.import_library: exported_shlib = link_result.linked_object.import_library @@ -1496,8 +1813,8 @@ def _soname(ctx: AnalysisContext, impl_params) -> str: return get_shared_library_name_for_param(linker_info, explicit_soname) return get_default_shared_library_name(linker_info, ctx.label) -def _base_static_library_name(ctx: AnalysisContext, stripped: bool) -> str: - return ctx.label.name + ".stripped" if stripped else ctx.label.name +def _base_static_library_name(ctx: AnalysisContext, optimized: bool, stripped: bool) -> str: + return "{}{}{}".format(ctx.label.name, ".optimized" if optimized else "", ".stripped" if stripped else "") def _archive_name(name: str, pic: bool, extension: str) -> str: return "lib{}{}.{}".format(name, ".pic" if pic else "", extension) diff --git a/prelude/cxx/cxx_library_utility.bzl b/prelude/cxx/cxx_library_utility.bzl index c3dd3082065..5b1e255e0f7 100644 --- a/prelude/cxx/cxx_library_utility.bzl +++ b/prelude/cxx/cxx_library_utility.bzl @@ -15,21 +15,21 @@ load( "@prelude//linking:link_info.bzl", "LinkStrategy", "LinkStyle", - "Linkage", "LinkerFlags", "MergedLinkInfo", ) +load("@prelude//linking:types.bzl", "Linkage") load( "@prelude//utils:utils.bzl", "flatten", "from_named_set", ) +load(":cxx_context.bzl", "get_cxx_platform_info", "get_cxx_toolchain_info") load( - ":compile.bzl", - "CxxCompileOutput", # @unused Used as a type + ":cxx_toolchain_types.bzl", + "LinkerType", + "ShlibInterfacesMode", ) -load(":cxx_context.bzl", "get_cxx_platform_info", "get_cxx_toolchain_info") -load(":cxx_toolchain_types.bzl", "ShlibInterfacesMode") load( ":headers.bzl", "cxx_attr_header_namespace", @@ -147,7 +147,7 @@ def cxx_attr_resources(ctx: AnalysisContext) -> dict[str, ArtifactOutputs]: return resources def cxx_is_gnu(ctx: AnalysisContext) -> bool: - return get_cxx_toolchain_info(ctx).linker_info.type == "gnu" + return get_cxx_toolchain_info(ctx).linker_info.type == LinkerType("gnu") def cxx_use_shlib_intfs(ctx: AnalysisContext) -> bool: """ @@ -161,6 +161,12 @@ def cxx_use_shlib_intfs(ctx: AnalysisContext) -> bool: linker_info = get_cxx_toolchain_info(ctx).linker_info return linker_info.shlib_interfaces != ShlibInterfacesMode("disabled") +def cxx_use_shlib_intfs_mode(ctx: AnalysisContext, mode: ShlibInterfacesMode) -> bool: + """ + Verify we are using a specific shared library interface mode. 
+ """ + return cxx_use_shlib_intfs(ctx) and get_cxx_toolchain_info(ctx).linker_info.shlib_interfaces == mode + def cxx_platform_supported(ctx: AnalysisContext) -> bool: """ Return whether this rule's `supported_platforms_regex` matches the current @@ -174,17 +180,3 @@ def cxx_platform_supported(ctx: AnalysisContext) -> bool: ctx.attrs.supported_platforms_regex, get_cxx_platform_info(ctx).name, ) - -def cxx_objects_sub_targets(outs: list[CxxCompileOutput]) -> dict[str, list[Provider]]: - objects_sub_targets = {} - for obj in outs: - sub_targets = {} - if obj.clang_trace: - sub_targets["clang-trace"] = [DefaultInfo(obj.clang_trace)] - if obj.clang_remarks: - sub_targets["clang-remarks"] = [DefaultInfo(obj.clang_remarks)] - objects_sub_targets[obj.object.short_path] = [DefaultInfo( - obj.object, - sub_targets = sub_targets, - )] - return objects_sub_targets diff --git a/prelude/cxx/cxx_link_utility.bzl b/prelude/cxx/cxx_link_utility.bzl index 87b0de94717..4cf953eb22d 100644 --- a/prelude/cxx/cxx_link_utility.bzl +++ b/prelude/cxx/cxx_link_utility.bzl @@ -7,18 +7,28 @@ load("@prelude//:artifact_tset.bzl", "project_artifacts") load("@prelude//:paths.bzl", "paths") -load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo") +load( + "@prelude//cxx:cxx_toolchain_types.bzl", + "CxxToolchainInfo", + "LinkerType", +) load("@prelude//cxx:debug.bzl", "SplitDebugMode") load("@prelude//cxx:linker.bzl", "get_rpath_origin") +load("@prelude//cxx:target_sdk_version.bzl", "get_target_sdk_version_flags") load( "@prelude//linking:link_info.bzl", "LinkArgs", "LinkOrdering", # @unused Used as a type - "LinkedObject", # @unused Used as a type "unpack_link_args", + "unpack_link_args_excluding_filelist", "unpack_link_args_filelist", ) load("@prelude//linking:lto.bzl", "LtoMode") +load( + "@prelude//linking:shared_libraries.bzl", + "SharedLibrary", # @unused Used as a type + "create_shlib_symlink_tree", +) load("@prelude//utils:arglike.bzl", "ArgLike") # @unused Used as a type def generates_split_debug(toolchain: CxxToolchainInfo): @@ -36,14 +46,14 @@ def generates_split_debug(toolchain: CxxToolchainInfo): def linker_map_args(toolchain: CxxToolchainInfo, linker_map) -> LinkArgs: linker_type = toolchain.linker_info.type - if linker_type == "darwin": + if linker_type == LinkerType("darwin"): flags = [ "-Xlinker", "-map", "-Xlinker", linker_map, ] - elif linker_type == "gnu": + elif linker_type == LinkerType("gnu"): flags = [ "-Xlinker", "-Map", @@ -57,22 +67,28 @@ def linker_map_args(toolchain: CxxToolchainInfo, linker_map) -> LinkArgs: LinkArgsOutput = record( link_args = ArgLike, hidden = list[typing.Any], - pdb_artifact = [Artifact, None], + pdb_artifact = Artifact | None, # The filelist artifact which contains the list of all object files. # Only present for Darwin linkers. Note that object files referenced # _inside_ the filelist are _not_ part of the `hidden` field above. # That's by design - we do not want to materialise _all_ object files # to inspect the filelist. Intended to be used for debugging. - filelist = [Artifact, None], + filelist = Artifact | None, ) +def get_extra_darwin_linker_flags() -> cmd_args: + """ + Returns a cmd_args object filled with hard coded linker flags that should be used for all links with a Darwin toolchain. 
+    return cmd_args("-Wl,-oso_prefix,.")
+
 def make_link_args(
+        ctx: AnalysisContext,
         actions: AnalysisActions,
         cxx_toolchain_info: CxxToolchainInfo,
         links: list[LinkArgs],
         suffix = None,
         output_short_path: [str, None] = None,
-        is_shared: [bool, None] = None,
         link_ordering: [LinkOrdering, None] = None) -> LinkArgsOutput:
     """
     Merges LinkArgs. Returns the args, files that must be present for those
@@ -86,28 +102,32 @@ def make_link_args(
     linker_info = cxx_toolchain_info.linker_info
     linker_type = linker_info.type

-    # On Apple platforms, DWARF data is contained in the object files
-    # and executables contains paths to the object files (N_OSO stab).
-    #
-    # By default, ld64 will use absolute file paths in N_OSO entries
-    # which machine-dependent executables. Such executables would not
-    # be debuggable on any host apart from the host which performed
-    # the linking. Instead, we want produce machine-independent
-    # hermetic executables, so we need to relativize those paths.
-    #
-    # This is accomplished by passing the `oso-prefix` flag to ld64,
-    # which will strip the provided prefix from the N_OSO paths.
-    #
-    # The flag accepts a special value, `.`, which means it will
-    # use the current workding directory. This will make all paths
-    # relative to the parent of `buck-out`.
-    #
-    # Because all actions in Buck2 are run from the project root
-    # and `buck-out` is always inside the project root, we can
-    # safely pass `.` as the `-oso_prefix` without having to
-    # write a wrapper script to compute it dynamically.
-    if linker_type == "darwin":
-        args.add(["-Wl,-oso_prefix,."])
+    if linker_type == LinkerType("darwin"):
+        # Darwin requires a target triple to be specified to
+        # control the deployment target being linked for.
+        args.add(get_target_sdk_version_flags(ctx))
+
+        # On Apple platforms, DWARF data is contained in the object files
+        # and executables contain paths to the object files (N_OSO stab).
+        #
+        # By default, ld64 will use absolute file paths in N_OSO entries,
+        # which makes the executables machine-dependent. Such executables would not
+        # be debuggable on any host apart from the host which performed
+        # the linking. Instead, we want to produce machine-independent
+        # hermetic executables, so we need to relativize those paths.
+        #
+        # This is accomplished by passing the `oso-prefix` flag to ld64,
+        # which will strip the provided prefix from the N_OSO paths.
+        #
+        # The flag accepts a special value, `.`, which means it will
+        # use the current working directory. This will make all paths
+        # relative to the parent of `buck-out`.
+        #
+        # Because all actions in Buck2 are run from the project root
+        # and `buck-out` is always inside the project root, we can
+        # safely pass `.` as the `-oso_prefix` without having to
+        # write a wrapper script to compute it dynamically.
+        args.add(get_extra_darwin_linker_flags())

     pdb_artifact = None
     if linker_info.is_pdb_generated and output_short_path != None:
         pdb_filename = paths.replace_extension(output_short_path, ".pdb")
@@ -115,23 +135,26 @@ def make_link_args(
         pdb_artifact = actions.declare_output(pdb_filename)
         hidden.append(pdb_artifact.as_output())

+    filelists = None
+    if linker_type == LinkerType("darwin"):
+        filelists = filter(None, [unpack_link_args_filelist(link) for link in links])
+        hidden.extend(filelists)
+
     for link in links:
-        args.add(unpack_link_args(link, is_shared, link_ordering = link_ordering))
+        if filelists:
+            # If we are using a filelist, only add arguments that aren't already in the
+            # filelist. This is to avoid duplicate inputs in the link command.
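The branch resuming below implements exactly the dedup described in the comment above. Note also that `make_link_args` gained a leading `AnalysisContext` parameter, so every call site needs updating; a hedged sketch of a post-change caller, where everything except `make_link_args` itself is a placeholder:

    def _demo_link(
            ctx: AnalysisContext,
            toolchain: CxxToolchainInfo,
            links: list[LinkArgs],
            output: Artifact):
        # `ctx` is the new leading argument; suffix and link_ordering keep
        # their defaults, and output_short_path drives PDB naming.
        result = make_link_args(
            ctx,
            ctx.actions,
            toolchain,
            links,
            output_short_path = output.short_path,
        )
        ctx.actions.run(
            cmd_args(
                toolchain.linker_info.linker,
                "-o",  # placeholder output flag; real linkers differ
                output.as_output(),
                result.link_args,
                hidden = result.hidden,
            ),
            category = "demo_link",
        )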
+ args.add(unpack_link_args_excluding_filelist(link, link_ordering = link_ordering)) + else: + args.add(unpack_link_args(link, link_ordering = link_ordering)) - filelists = filter(None, [unpack_link_args_filelist(link) for link in links]) - hidden.extend(filelists) + # On Darwin, filelist args _must_ come last as the order can affect symbol + # resolution and result in binary size increases. filelist_file = None if filelists: - if linker_type == "gnu": - fail("filelist populated for gnu linker") - elif linker_type == "darwin": - # On Darwin, filelist args _must_ come last as there's semantical difference - # of the position. - path = actions.write("filelist%s.txt" % suffix, filelists) - args.add(["-Xlinker", "-filelist", "-Xlinker", path]) - filelist_file = path - else: - fail("Linker type {} not supported".format(linker_type)) + path = actions.write("filelist%s.txt" % suffix, filelists) + args.add(cmd_args(["-Xlinker", "-filelist", "-Xlinker", path])) + filelist_file = path return LinkArgsOutput( link_args = args, @@ -177,22 +200,21 @@ def cxx_sanitizer_runtime_arguments( if not linker_info.sanitizer_runtime_files: fail("C++ sanitizer runtime enabled but there are no runtime files") - if linker_info.type == "darwin": - runtime_rpath = cmd_args() + if linker_info.type == LinkerType("darwin"): + # ignore_artifacts as the runtime directory is not required at _link_ time + runtime_rpath = cmd_args(ignore_artifacts = True) runtime_files = linker_info.sanitizer_runtime_files for runtime_shared_lib in runtime_files: # Rpath-relative dylibs have an install name of `@rpath/libName.dylib`, # which means we need to add the parent dir of the dylib as an rpath. - runtime_shared_lib_dir = cmd_args(runtime_shared_lib).parent() + runtime_shared_lib_dir = cmd_args(runtime_shared_lib, parent = 1) # The parent dir of the runtime shared lib must appear as a path # relative to the parent dir of the binary. `@executable_path` # represents the parent dir of the binary, not the binary itself. - runtime_shared_lib_rpath = cmd_args(runtime_shared_lib_dir, format = "-Wl,-rpath,@executable_path/{}").relative_to(output, parent = 1) + runtime_shared_lib_rpath = cmd_args(runtime_shared_lib_dir, format = "-Wl,-rpath,@executable_path/{}", relative_to = (output, 1)) runtime_rpath.add(runtime_shared_lib_rpath) - # Ignore_artifacts() as the runtime directory is not required at _link_ time - runtime_rpath = runtime_rpath.ignore_artifacts() return CxxSanitizerRuntimeArguments( extra_link_args = [ runtime_rpath, @@ -214,7 +236,7 @@ def executable_shared_lib_arguments( ctx: AnalysisContext, cxx_toolchain: CxxToolchainInfo, output: Artifact, - shared_libs: dict[str, LinkedObject]) -> ExecutableSharedLibArguments: + shared_libs: list[SharedLibrary]) -> ExecutableSharedLibArguments: extra_link_args = [] runtime_files = [] shared_libs_symlink_tree = None @@ -223,30 +245,36 @@ def executable_shared_lib_arguments( # of a build. Do not add to runtime_files. 
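Before the shared-library rpath handling continues below, one cross-cutting change deserves an aside: raw linker-type strings become `LinkerType(...)` enum values throughout this diff (see the darwin checks above). Starlark's `enum()` validates membership at construction time, so a typo fails loudly during analysis rather than silently failing every string comparison. A standalone sketch, with an illustrative member list:

    # The member list here is illustrative; the real one lives in
    # cxx_toolchain_types.bzl.
    DemoLinkerType = enum("gnu", "darwin", "windows")

    def _is_windows_linker(linker_type: DemoLinkerType) -> bool:
        # Enum members compare by identity, and DemoLinkerType("widnows")
        # would fail at construction instead of never matching.
        return linker_type == DemoLinkerType("windows")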
     external_debug_info = project_artifacts(
         actions = ctx.actions,
-        tsets = [shlib.external_debug_info for shlib in shared_libs.values()],
+        tsets = [shlib.lib.external_debug_info for shlib in shared_libs],
     )

     linker_type = cxx_toolchain.linker_info.type

     if len(shared_libs) > 0:
-        if linker_type == "windows":
+        if linker_type == LinkerType("windows"):
             shared_libs_symlink_tree = [ctx.actions.symlink_file(
-                shlib.output.basename,
-                shlib.output,
-            ) for _, shlib in shared_libs.items()]
+                shlib.lib.output.basename,
+                shlib.lib.output,
+            ) for shlib in shared_libs]
             runtime_files.extend(shared_libs_symlink_tree)
             # Windows doesn't support rpath.
         else:
-            shared_libs_symlink_tree = ctx.actions.symlinked_dir(
-                shared_libs_symlink_tree_name(output),
-                {name: shlib.output for name, shlib in shared_libs.items()},
+            shared_libs_symlink_tree = create_shlib_symlink_tree(
+                actions = ctx.actions,
+                out = shared_libs_symlink_tree_name(output),
+                shared_libs = shared_libs,
             )
             runtime_files.append(shared_libs_symlink_tree)
             rpath_reference = get_rpath_origin(linker_type)

-            # We ignore_artifacts() here since we don't want the symlink tree to actually be there for the link.
-            rpath_arg = cmd_args(shared_libs_symlink_tree, format = "-Wl,-rpath,{}/{{}}".format(rpath_reference)).relative_to(output, parent = 1).ignore_artifacts()
+            # We ignore_artifacts here since we don't want the symlink tree to actually be there for the link.
+            rpath_arg = cmd_args(
+                shared_libs_symlink_tree,
+                format = "-Wl,-rpath,{}/{{}}".format(rpath_reference),
+                ignore_artifacts = True,
+                relative_to = (output, 1),
+            )
             extra_link_args.append(rpath_arg)

     return ExecutableSharedLibArguments(
diff --git a/prelude/cxx/cxx_sources.bzl b/prelude/cxx/cxx_sources.bzl
index ece339680fd..ffbc433b311 100644
--- a/prelude/cxx/cxx_sources.bzl
+++ b/prelude/cxx/cxx_sources.bzl
@@ -9,15 +9,22 @@ load(
     "@prelude//utils:utils.bzl",
     "flatten",
 )
-load(
-    ":compile.bzl",
-    "CxxSrcWithFlags",
-)
 load(":platform.bzl", "cxx_by_platform")

+# An input to cxx compilation, consisting of a file to compile and optional
+# file-specific flags to compile with.
+CxxSrcWithFlags = record(
+    file = field(Artifact),
+    flags = field(list[ResolvedStringWithMacros], []),
+    # If we have multiple source entries with the same files but different flags,
+    # specify an index so we can differentiate them. Otherwise, use None.
+    index = field([int, None], None),
+    is_header = field(bool, False),
+)
+
 # The source files
-def get_srcs_with_flags(ctx: AnalysisContext) -> list[CxxSrcWithFlags]:
-    all_srcs = ctx.attrs.srcs + flatten(cxx_by_platform(ctx, ctx.attrs.platform_srcs))
+def get_srcs_with_flags(ctx: AnalysisContext, additional_srcs: list = []) -> list[CxxSrcWithFlags]:
+    all_srcs = ctx.attrs.srcs + flatten(cxx_by_platform(ctx, ctx.attrs.platform_srcs)) + additional_srcs

     # src -> flags_hash -> flags
     flags_sets_by_src = {}
diff --git a/prelude/cxx/cxx_toolchain.bzl b/prelude/cxx/cxx_toolchain.bzl
index 8eca13ea883..27c4e8a6eff 100644
--- a/prelude/cxx/cxx_toolchain.bzl
+++ b/prelude/cxx/cxx_toolchain.bzl
@@ -6,11 +6,32 @@
 # of this source tree.
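The cxx_toolchain.bzl hunks below continue a migration visible throughout this diff: builder-style `cmd_args` mutation (`.add()`, `.hidden()`, `.parent()`, `.relative_to()`, `.ignore_artifacts()`) is replaced by constructor keyword arguments. A schematic before/after, assuming only that the inputs exist:

    def _demo_cmd_args_style(entry: str, lib_dir: Artifact, output: Artifact, inputs: list[Artifact]) -> cmd_args:
        # Constructor style, equivalent to the removed builder calls on a
        # mutable cmd_args: every option is a keyword argument up front.
        return cmd_args(
            cmd_args(entry, quote = "shell"),
            cmd_args(
                lib_dir,
                parent = 1,                 # was .parent()
                relative_to = (output, 1),  # was .relative_to(output, parent = 1)
                ignore_artifacts = True,    # was .ignore_artifacts()
            ),
            hidden = inputs,                # was .hidden(inputs)
        )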
load("@prelude//:is_full_meta_repo.bzl", "is_full_meta_repo") -load("@prelude//cxx:cxx_toolchain_types.bzl", "AsCompilerInfo", "AsmCompilerInfo", "BinaryUtilitiesInfo", "CCompilerInfo", "CudaCompilerInfo", "CvtresCompilerInfo", "CxxCompilerInfo", "CxxObjectFormat", "DepTrackingMode", "DistLtoToolsInfo", "HipCompilerInfo", "LinkerInfo", "PicBehavior", "RcCompilerInfo", "ShlibInterfacesMode", "StripFlagsInfo", "cxx_toolchain_infos") +load( + "@prelude//cxx:cxx_toolchain_types.bzl", + "AsCompilerInfo", + "AsmCompilerInfo", + "BinaryUtilitiesInfo", + "CCompilerInfo", + "CudaCompilerInfo", + "CvtresCompilerInfo", + "CxxCompilerInfo", + "CxxInternalTools", + "CxxObjectFormat", + "DepTrackingMode", + "HipCompilerInfo", + "LinkerInfo", + "LinkerType", + "PicBehavior", + "RcCompilerInfo", + "ShlibInterfacesMode", + "StripFlagsInfo", + "cxx_toolchain_infos", +) load("@prelude//cxx:cxx_utility.bzl", "cxx_toolchain_allow_cache_upload_args") load("@prelude//cxx:debug.bzl", "SplitDebugMode") load("@prelude//cxx:headers.bzl", "HeaderMode", "HeadersAsRawHeadersMode") load("@prelude//cxx:linker.bzl", "LINKERS", "is_pdb_generated") +load("@prelude//cxx:target_sdk_version.bzl", "get_toolchain_target_sdk_version") load("@prelude//linking:link_info.bzl", "LinkOrdering", "LinkStyle") load("@prelude//linking:lto.bzl", "LtoMode", "lto_compiler_flags") load("@prelude//utils:utils.bzl", "flatten", "value_or") @@ -30,20 +51,18 @@ def cxx_toolchain_impl(ctx): c_info = CCompilerInfo( compiler = c_compiler, compiler_type = ctx.attrs.c_compiler_type or ctx.attrs.compiler_type, - compiler_flags = cmd_args(ctx.attrs.c_compiler_flags).add(c_lto_flags), + compiler_flags = cmd_args(ctx.attrs.c_compiler_flags, c_lto_flags), preprocessor = c_compiler, preprocessor_flags = cmd_args(ctx.attrs.c_preprocessor_flags), - dep_files_processor = ctx.attrs._dep_files_processor[RunInfo], allow_cache_upload = ctx.attrs.c_compiler_allow_cache_upload, ) cxx_compiler = _get_maybe_wrapped_msvc(ctx.attrs.cxx_compiler[RunInfo], ctx.attrs.cxx_compiler_type or ctx.attrs.compiler_type, ctx.attrs._msvc_hermetic_exec[RunInfo]) cxx_info = CxxCompilerInfo( compiler = cxx_compiler, compiler_type = ctx.attrs.cxx_compiler_type or ctx.attrs.compiler_type, - compiler_flags = cmd_args(ctx.attrs.cxx_compiler_flags).add(c_lto_flags), + compiler_flags = cmd_args(ctx.attrs.cxx_compiler_flags, c_lto_flags), preprocessor = cxx_compiler, preprocessor_flags = cmd_args(ctx.attrs.cxx_preprocessor_flags), - dep_files_processor = ctx.attrs._dep_files_processor[RunInfo], allow_cache_upload = ctx.attrs.cxx_compiler_allow_cache_upload, ) asm_info = AsmCompilerInfo( @@ -51,21 +70,19 @@ def cxx_toolchain_impl(ctx): compiler_type = ctx.attrs.asm_compiler_type or ctx.attrs.compiler_type, compiler_flags = cmd_args(ctx.attrs.asm_compiler_flags), preprocessor_flags = cmd_args(ctx.attrs.asm_preprocessor_flags), - dep_files_processor = ctx.attrs._dep_files_processor[RunInfo], ) if ctx.attrs.asm_compiler else None as_info = AsCompilerInfo( compiler = ctx.attrs.assembler[RunInfo], compiler_type = ctx.attrs.assembler_type or ctx.attrs.compiler_type, compiler_flags = cmd_args(ctx.attrs.assembler_flags), preprocessor_flags = cmd_args(ctx.attrs.assembler_preprocessor_flags), - dep_files_processor = ctx.attrs._dep_files_processor[RunInfo], ) if ctx.attrs.assembler else None cuda_info = CudaCompilerInfo( compiler = ctx.attrs.cuda_compiler[RunInfo], compiler_type = ctx.attrs.cuda_compiler_type or ctx.attrs.compiler_type, compiler_flags = cmd_args(ctx.attrs.cuda_compiler_flags), 
preprocessor_flags = cmd_args(ctx.attrs.cuda_preprocessor_flags), - dep_files_processor = ctx.attrs._dep_files_processor[RunInfo], + allow_cache_upload = ctx.attrs.cuda_compiler_allow_cache_upload, ) if ctx.attrs.cuda_compiler else None hip_info = HipCompilerInfo( compiler = ctx.attrs.hip_compiler[RunInfo], @@ -86,23 +103,27 @@ def cxx_toolchain_impl(ctx): preprocessor_flags = cmd_args(ctx.attrs.rc_preprocessor_flags), ) if ctx.attrs.rc_compiler else None + linker_type = LinkerType(ctx.attrs.linker_type) linker_info = LinkerInfo( archiver = ctx.attrs.archiver[RunInfo], archiver_flags = cmd_args(ctx.attrs.archiver_flags), + archiver_reads_inputs = ctx.attrs.archiver_reads_inputs, archiver_supports_argfiles = ctx.attrs.archiver_supports_argfiles, archiver_type = ctx.attrs.archiver_type, archive_contents = ctx.attrs.archive_contents, archive_objects_locally = False, + archive_symbol_table = ctx.attrs.archive_symbol_table, binary_extension = value_or(ctx.attrs.binary_extension, ""), generate_linker_maps = ctx.attrs.generate_linker_maps, - is_pdb_generated = is_pdb_generated(ctx.attrs.linker_type, ctx.attrs.linker_flags), + is_pdb_generated = is_pdb_generated(linker_type, ctx.attrs.linker_flags), link_binaries_locally = not value_or(ctx.attrs.cache_links, True), link_libraries_locally = False, link_style = LinkStyle(ctx.attrs.link_style), link_weight = ctx.attrs.link_weight, link_ordering = ctx.attrs.link_ordering, linker = ctx.attrs.linker[RunInfo], - linker_flags = cmd_args(ctx.attrs.linker_flags).add(c_lto_flags), + linker_flags = cmd_args(ctx.attrs.linker_flags, c_lto_flags), + dist_thin_lto_codegen_flags = cmd_args(ctx.attrs.dist_thin_lto_codegen_flags) if ctx.attrs.dist_thin_lto_codegen_flags else None, post_linker_flags = cmd_args(ctx.attrs.post_linker_flags), lto_mode = lto_mode, mk_shlib_intf = ctx.attrs.shared_library_interface_producer, @@ -121,14 +142,14 @@ def cxx_toolchain_impl(ctx): static_dep_runtime_ld_flags = ctx.attrs.static_dep_runtime_ld_flags, static_library_extension = ctx.attrs.static_library_extension or "a", static_pic_dep_runtime_ld_flags = ctx.attrs.static_pic_dep_runtime_ld_flags, - type = ctx.attrs.linker_type, + type = linker_type, use_archiver_flags = ctx.attrs.use_archiver_flags, - produce_interface_from_stub_shared_library = ctx.attrs.produce_interface_from_stub_shared_library, ) utilities_info = BinaryUtilitiesInfo( nm = ctx.attrs.nm[RunInfo], objcopy = ctx.attrs.objcopy_for_shared_library_interface[RunInfo], + objdump = ctx.attrs.objdump[RunInfo] if ctx.attrs.objdump else None, ranlib = ctx.attrs.ranlib[RunInfo] if ctx.attrs.ranlib else None, strip = ctx.attrs.strip[RunInfo], dwp = None, @@ -145,6 +166,7 @@ def cxx_toolchain_impl(ctx): return [ DefaultInfo(), ] + cxx_toolchain_infos( + internal_tools = ctx.attrs._internal_tools[CxxInternalTools], platform_name = platform_name, linker_info = linker_info, binary_utilities_info = utilities_info, @@ -162,24 +184,29 @@ def cxx_toolchain_impl(ctx): object_format = CxxObjectFormat(object_format), headers_as_raw_headers_mode = HeadersAsRawHeadersMode(ctx.attrs.headers_as_raw_headers_mode) if ctx.attrs.headers_as_raw_headers_mode != None else None, conflicting_header_basename_allowlist = ctx.attrs.conflicting_header_basename_exemptions, - mk_hmap = ctx.attrs._mk_hmap[RunInfo], - mk_comp_db = ctx.attrs._mk_comp_db, pic_behavior = PicBehavior(ctx.attrs.pic_behavior), split_debug_mode = SplitDebugMode(ctx.attrs.split_debug_mode), strip_flags_info = strip_flags_info, # TODO(T138705365): Turn on dep files by default 
use_dep_files = value_or(ctx.attrs.use_dep_files, _get_default_use_dep_files(platform_name)), clang_remarks = ctx.attrs.clang_remarks, + gcno_files = value_or(ctx.attrs.gcno_files, False), clang_trace = value_or(ctx.attrs.clang_trace, False), cpp_dep_tracking_mode = DepTrackingMode(ctx.attrs.cpp_dep_tracking_mode), cuda_dep_tracking_mode = DepTrackingMode(ctx.attrs.cuda_dep_tracking_mode), dumpbin_toolchain_path = ctx.attrs._dumpbin_toolchain_path[DefaultInfo].default_outputs[0] if ctx.attrs._dumpbin_toolchain_path else None, + target_sdk_version = get_toolchain_target_sdk_version(ctx), + lipo = ctx.attrs.lipo[RunInfo] if ctx.attrs.lipo else None, + remap_cwd = ctx.attrs.remap_cwd, + optimization_compiler_flags_EXPERIMENTAL = ctx.attrs.optimization_compiler_flags_EXPERIMENTAL, ) def cxx_toolchain_extra_attributes(is_toolchain_rule): dep_type = attrs.exec_dep if is_toolchain_rule else attrs.dep return { + "archive_symbol_table": attrs.bool(default = True), "archiver": dep_type(providers = [RunInfo]), + "archiver_reads_inputs": attrs.bool(default = True), "archiver_supports_argfiles": attrs.bool(default = False), "asm_compiler": attrs.option(dep_type(providers = [RunInfo]), default = None), "asm_preprocessor": attrs.option(dep_type(providers = [RunInfo]), default = None), @@ -194,16 +221,22 @@ def cxx_toolchain_extra_attributes(is_toolchain_rule): "cuda_dep_tracking_mode": attrs.enum(DepTrackingMode.values(), default = "makefile"), "cvtres_compiler": attrs.option(dep_type(providers = [RunInfo]), default = None), "cxx_compiler": dep_type(providers = [RunInfo]), + "gcno_files": attrs.bool(default = False), "generate_linker_maps": attrs.bool(default = False), "hip_compiler": attrs.option(dep_type(providers = [RunInfo]), default = None), "link_ordering": attrs.enum(LinkOrdering.values(), default = "preorder"), "link_weight": attrs.int(default = 1), "linker": dep_type(providers = [RunInfo]), + "lipo": attrs.option(dep_type(providers = [RunInfo]), default = None), "llvm_link": attrs.option(dep_type(providers = [RunInfo]), default = None), "lto_mode": attrs.enum(LtoMode.values(), default = "none"), + # Darwin only: the minimum deployment target supported + "min_sdk_version": attrs.option(attrs.string(), default = None), "nm": dep_type(providers = [RunInfo]), "objcopy_for_shared_library_interface": dep_type(providers = [RunInfo]), + "objdump": attrs.option(dep_type(providers = [RunInfo]), default = None), "object_format": attrs.enum(CxxObjectFormat.values(), default = "native"), + "optimization_compiler_flags_EXPERIMENTAL": attrs.list(attrs.string(), default = []), "pic_behavior": attrs.enum(PicBehavior.values(), default = "supported"), # A placeholder tool that can be used to set up toolchain constraints. # Useful when fat and thin toolchahins share the same underlying tools via `command_alias()`, @@ -212,10 +245,10 @@ def cxx_toolchain_extra_attributes(is_toolchain_rule): # Used for resolving any 'platform_*' attributes. 
"platform_name": attrs.option(attrs.string(), default = None), "private_headers_symlinks_enabled": attrs.bool(default = True), - "produce_interface_from_stub_shared_library": attrs.bool(default = False), "public_headers_symlinks_enabled": attrs.bool(default = True), "ranlib": attrs.option(dep_type(providers = [RunInfo]), default = None), "rc_compiler": attrs.option(dep_type(providers = [RunInfo]), default = None), + "remap_cwd": attrs.bool(default = False), "requires_objects": attrs.bool(default = False), "sanitizer_runtime_enabled": attrs.bool(default = False), "sanitizer_runtime_files": attrs.set(attrs.dep(), sorted = True, default = []), # Use `attrs.dep()` as it's not a tool, always propagate target platform @@ -224,10 +257,10 @@ def cxx_toolchain_extra_attributes(is_toolchain_rule): "split_debug_mode": attrs.enum(SplitDebugMode.values(), default = "none"), "strip": dep_type(providers = [RunInfo]), "supports_distributed_thinlto": attrs.bool(default = False), + # Darwin only: the deployment target to use for this build + "target_sdk_version": attrs.option(attrs.string(), default = None), "use_archiver_flags": attrs.bool(default = True), "use_dep_files": attrs.option(attrs.bool(), default = None), - "_dep_files_processor": dep_type(providers = [RunInfo], default = "prelude//cxx/tools:dep_file_processor"), - "_dist_lto_tools": attrs.default_only(dep_type(providers = [DistLtoToolsInfo], default = "prelude//cxx/dist_lto/tools:dist_lto_tools")), # TODO(scottcao): Figure out a slightly better way to integrate this. In theory, this is only needed for clang toolchain. # If we were using msvc, we should be able to use dumpbin directly. "_dumpbin_toolchain_path": attrs.default_only(attrs.option(dep_type(providers = [DefaultInfo]), default = select({ @@ -237,13 +270,12 @@ def cxx_toolchain_extra_attributes(is_toolchain_rule): # to fail, so I need a DEFAULT here when some target without cpu constraint tries to configure against the # windows exec platform. "DEFAULT": None, + # FIXME: prelude// should be standalone (not refer to fbsource//) "ovr_config//cpu:x86_32": "fbsource//arvr/third-party/toolchains/visual_studio:cl_x86_and_tools", "ovr_config//cpu:x86_64": "fbsource//arvr/third-party/toolchains/visual_studio:cl_x64_and_tools", }), }) if is_full_meta_repo() else None)), - "_mk_comp_db": attrs.default_only(dep_type(providers = [RunInfo], default = "prelude//cxx/tools:make_comp_db")), - # FIXME: prelude// should be standalone (not refer to fbsource//) - "_mk_hmap": attrs.default_only(dep_type(providers = [RunInfo], default = "prelude//cxx/tools:hmap_wrapper")), + "_internal_tools": attrs.default_only(dep_type(providers = [CxxInternalTools], default = "prelude//cxx/tools:internal_tools")), "_msvc_hermetic_exec": attrs.default_only(dep_type(providers = [RunInfo], default = "prelude//windows/tools:msvc_hermetic_exec")), } | cxx_toolchain_allow_cache_upload_args() @@ -290,14 +322,14 @@ def _get_shared_library_name_default_prefix(ctx: AnalysisContext) -> str: return "" if extension == "dll" else "lib" def _get_shared_library_name_format(ctx: AnalysisContext) -> str: - linker_type = ctx.attrs.linker_type + linker_type = LinkerType(ctx.attrs.linker_type) extension = ctx.attrs.shared_library_extension if extension == "": extension = LINKERS[linker_type].default_shared_library_extension return "{}." 
+ extension def _get_shared_library_versioned_name_format(ctx: AnalysisContext) -> str: - linker_type = ctx.attrs.linker_type + linker_type = LinkerType(ctx.attrs.linker_type) extension_format = ctx.attrs.shared_library_versioned_extension_format.replace("%s", "{}") if extension_format == "": extension_format = LINKERS[linker_type].default_shared_library_versioned_extension_format diff --git a/prelude/cxx/cxx_toolchain_macro_layer.bzl b/prelude/cxx/cxx_toolchain_macro_layer.bzl index 4b38584ad91..adaeada5a36 100644 --- a/prelude/cxx/cxx_toolchain_macro_layer.bzl +++ b/prelude/cxx/cxx_toolchain_macro_layer.bzl @@ -7,12 +7,9 @@ def cxx_toolchain_macro_impl(cxx_toolchain_rule = None, **kwargs): # `cxx.linker_map_enabled` overrides toolchain behavior - linker_map_enabled = read_root_config("cxx", "linker_map_enabled") - if linker_map_enabled != None: - if linker_map_enabled.lower() == "true": - kwargs["generate_linker_maps"] = True - else: - kwargs["generate_linker_maps"] = False + if "generate_linker_maps" not in kwargs: + linker_map_enabled = read_root_config("cxx", "linker_map_enabled", "") + kwargs["generate_linker_maps"] = linker_map_enabled.lower() == "true" bitcode = read_root_config("cxx", "bitcode") if bitcode != None: diff --git a/prelude/cxx/cxx_toolchain_types.bzl b/prelude/cxx/cxx_toolchain_types.bzl index ffd05a17eb2..d58e80757c8 100644 --- a/prelude/cxx/cxx_toolchain_types.bzl +++ b/prelude/cxx/cxx_toolchain_types.bzl @@ -7,9 +7,9 @@ load("@prelude//cxx:debug.bzl", "SplitDebugMode") -LinkerType = ["gnu", "darwin", "windows", "wasm"] +LinkerType = enum("gnu", "darwin", "windows", "wasm") -ShlibInterfacesMode = enum("disabled", "enabled", "defined_only") +ShlibInterfacesMode = enum("disabled", "enabled", "defined_only", "stub_from_library", "stub_from_headers") # TODO(T110378149): Consider whether it makes sense to move these things to # configurations/constraints rather than part of the toolchain. @@ -18,13 +18,16 @@ LinkerInfo = provider( fields = { "archiver": provider_field(typing.Any, default = None), "archiver_flags": provider_field(typing.Any, default = None), + "archiver_reads_inputs": provider_field(bool, default = True), "archiver_supports_argfiles": provider_field(typing.Any, default = None), "archiver_type": provider_field(typing.Any, default = None), "archive_contents": provider_field(typing.Any, default = None), "archive_objects_locally": provider_field(typing.Any, default = None), + "archive_symbol_table": provider_field(bool, default = True), # "archiver_platform", # "" on Unix, "exe" on Windows "binary_extension": provider_field(typing.Any, default = None), # str + "dist_thin_lto_codegen_flags": provider_field([cmd_args, None], default = None), "generate_linker_maps": provider_field(typing.Any, default = None), # bool # Whether to run native links locally. 
We support this for fbcode platforms # to avoid issues with C++ static links (see comment in @@ -39,11 +42,11 @@ LinkerInfo = provider( "link_ordering": provider_field(typing.Any, default = None), # LinkOrdering "linker": provider_field(typing.Any, default = None), "linker_flags": provider_field(typing.Any, default = None), - "post_linker_flags": provider_field(typing.Any, default = None), "lto_mode": provider_field(typing.Any, default = None), "mk_shlib_intf": provider_field(typing.Any, default = None), # "o" on Unix, "obj" on Windows "object_file_extension": provider_field(typing.Any, default = None), # str + "post_linker_flags": provider_field(typing.Any, default = None), "sanitizer_runtime_enabled": provider_field(bool, default = False), "sanitizer_runtime_files": provider_field(list[Artifact], default = []), "shlib_interfaces": provider_field(ShlibInterfacesMode), @@ -61,11 +64,10 @@ LinkerInfo = provider( "requires_objects": provider_field(typing.Any, default = None), "supports_distributed_thinlto": provider_field(typing.Any, default = None), "independent_shlib_interface_linker_flags": provider_field(typing.Any, default = None), - "type": provider_field(typing.Any, default = None), # of "LinkerType" type + "type": LinkerType, "use_archiver_flags": provider_field(typing.Any, default = None), "force_full_hybrid_if_capable": provider_field(typing.Any, default = None), "is_pdb_generated": provider_field(typing.Any, default = None), # bool - "produce_interface_from_stub_shared_library": provider_field(typing.Any, default = None), # bool }, ) @@ -74,6 +76,7 @@ BinaryUtilitiesInfo = provider(fields = { "dwp": provider_field(typing.Any, default = None), "nm": provider_field(typing.Any, default = None), "objcopy": provider_field(typing.Any, default = None), + "objdump": provider_field(typing.Any, default = None), "ranlib": provider_field(typing.Any, default = None), "strip": provider_field(typing.Any, default = None), }) @@ -118,9 +121,9 @@ _compiler_fields = [ "preprocessor", "preprocessor_type", "preprocessor_flags", - "dep_files_processor", # Controls cache upload for object files "allow_cache_upload", + "supports_two_phase_compilation", ] HipCompilerInfo = provider(fields = _compiler_fields) @@ -132,10 +135,22 @@ CxxCompilerInfo = provider(fields = _compiler_fields) AsmCompilerInfo = provider(fields = _compiler_fields) AsCompilerInfo = provider(fields = _compiler_fields) -DistLtoToolsInfo = provider( - # @unsorted-dict-items - fields = {"planner": provider_field(typing.Any, default = None), "opt": provider_field(typing.Any, default = None), "prepare": provider_field(typing.Any, default = None), "copy": provider_field(typing.Any, default = None)}, -) +DistLtoToolsInfo = provider(fields = dict( + planner = dict[LinkerType, RunInfo], + opt = dict[LinkerType, RunInfo], + prepare = RunInfo, + copy = RunInfo, +)) + +CxxInternalTools = provider(fields = dict( + concatenate_diagnostics = RunInfo, + dep_file_processor = RunInfo, + dist_lto = DistLtoToolsInfo, + hmap_wrapper = RunInfo, + make_comp_db = RunInfo, + remap_cwd = RunInfo, + stderr_to_file = RunInfo, +)) CxxObjectFormat = enum( "native", @@ -172,6 +187,7 @@ PicBehavior = enum( CxxToolchainInfo = provider( # @unsorted-dict-items fields = { + "internal_tools": provider_field(CxxInternalTools), "conflicting_header_basename_allowlist": provider_field(typing.Any, default = None), "use_distributed_thinlto": provider_field(typing.Any, default = None), "header_mode": provider_field(typing.Any, default = None), @@ -187,12 +203,10 @@ 
CxxToolchainInfo = provider( "cuda_compiler_info": provider_field(typing.Any, default = None), "cvtres_compiler_info": provider_field(typing.Any, default = None), "rc_compiler_info": provider_field(typing.Any, default = None), - "mk_comp_db": provider_field(typing.Any, default = None), - "mk_hmap": provider_field(typing.Any, default = None), "llvm_link": provider_field(typing.Any, default = None), - "dist_lto_tools_info": provider_field(typing.Any, default = None), "use_dep_files": provider_field(typing.Any, default = None), "clang_remarks": provider_field(typing.Any, default = None), + "gcno_files": provider_field(typing.Any, default = None), "clang_trace": provider_field(typing.Any, default = None), "cpp_dep_tracking_mode": provider_field(typing.Any, default = None), "cuda_dep_tracking_mode": provider_field(typing.Any, default = None), @@ -201,6 +215,10 @@ CxxToolchainInfo = provider( "bolt_enabled": provider_field(typing.Any, default = None), "pic_behavior": provider_field(typing.Any, default = None), "dumpbin_toolchain_path": provider_field(typing.Any, default = None), + "target_sdk_version": provider_field([str, None], default = None), + "lipo": provider_field([RunInfo, None], default = None), + "remap_cwd": provider_field(bool, default = False), + "optimization_compiler_flags_EXPERIMENTAL": provider_field(typing.Any, default = []), }, ) @@ -218,9 +236,6 @@ def _validate_linker_info(info: LinkerInfo): if info.requires_archives and info.requires_objects: fail("only one of `requires_archives` and `requires_objects` can be enabled") - if info.supports_distributed_thinlto and not info.requires_objects: - fail("distributed thinlto requires enabling `requires_objects`") - def is_bitcode_format(format: CxxObjectFormat) -> bool: return format in [CxxObjectFormat("bitcode"), CxxObjectFormat("embedded-bitcode")] @@ -231,6 +246,7 @@ def cxx_toolchain_infos( linker_info, binary_utilities_info, header_mode, + internal_tools: CxxInternalTools, headers_as_raw_headers_mode = None, conflicting_header_basename_allowlist = [], asm_compiler_info = None, @@ -240,22 +256,24 @@ def cxx_toolchain_infos( cvtres_compiler_info = None, rc_compiler_info = None, object_format = CxxObjectFormat("native"), - mk_comp_db = None, - mk_hmap = None, use_distributed_thinlto = False, use_dep_files = False, clang_remarks = None, + gcno_files = None, clang_trace = False, cpp_dep_tracking_mode = DepTrackingMode("none"), cuda_dep_tracking_mode = DepTrackingMode("none"), strip_flags_info = None, - dist_lto_tools_info: [DistLtoToolsInfo, None] = None, split_debug_mode = SplitDebugMode("none"), bolt_enabled = False, llvm_link = None, platform_deps_aliases = [], pic_behavior = PicBehavior("supported"), - dumpbin_toolchain_path = None): + dumpbin_toolchain_path = None, + target_sdk_version = None, + lipo = None, + remap_cwd = False, + optimization_compiler_flags_EXPERIMENTAL = []): """ Creates the collection of cxx-toolchain Infos for a cxx toolchain. 
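# With LinkerType now an enum rather than a list of strings, call sites construct
# an enum value to compare against. A minimal sketch of the new idiom:
#
#     load("@prelude//cxx:cxx_toolchain_types.bzl", "LinkerType")
#
#     def _is_darwin(linker_info) -> bool:
#         # enum() rejects unknown values at construction, so a typo such as
#         # LinkerType("darwn") fails fast instead of silently comparing unequal.
#         return linker_info.type == LinkerType("darwin")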
@@ -268,6 +286,7 @@ def cxx_toolchain_infos( _validate_linker_info(linker_info) toolchain_info = CxxToolchainInfo( + internal_tools = internal_tools, conflicting_header_basename_allowlist = conflicting_header_basename_allowlist, header_mode = header_mode, headers_as_raw_headers_mode = headers_as_raw_headers_mode, @@ -282,13 +301,11 @@ def cxx_toolchain_infos( cuda_compiler_info = cuda_compiler_info, cvtres_compiler_info = cvtres_compiler_info, rc_compiler_info = rc_compiler_info, - mk_comp_db = mk_comp_db, - mk_hmap = mk_hmap, object_format = object_format, - dist_lto_tools_info = dist_lto_tools_info, use_distributed_thinlto = use_distributed_thinlto, use_dep_files = use_dep_files, clang_remarks = clang_remarks, + gcno_files = gcno_files, clang_trace = clang_trace, cpp_dep_tracking_mode = cpp_dep_tracking_mode, cuda_dep_tracking_mode = cuda_dep_tracking_mode, @@ -297,6 +314,10 @@ def cxx_toolchain_infos( bolt_enabled = bolt_enabled, pic_behavior = pic_behavior, dumpbin_toolchain_path = dumpbin_toolchain_path, + target_sdk_version = target_sdk_version, + lipo = lipo, + remap_cwd = remap_cwd, + optimization_compiler_flags_EXPERIMENTAL = optimization_compiler_flags_EXPERIMENTAL, ) # Provide placeholder mappings, used primarily by cxx_genrule. diff --git a/prelude/cxx/cxx_types.bzl b/prelude/cxx/cxx_types.bzl index 62cd70bb3f2..940e29da7c0 100644 --- a/prelude/cxx/cxx_types.bzl +++ b/prelude/cxx/cxx_types.bzl @@ -5,7 +5,11 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -load("@prelude//:artifact_tset.bzl", "ArtifactTSet") # @unused Used as a type +load("@prelude//:artifact_tset.bzl", "ArtifactInfoTag", "ArtifactTSet") +load( + "@prelude//cxx:link_groups_types.bzl", + "LinkGroupInfo", # @unused Used as a type +) load( "@prelude//linking:link_info.bzl", "LinkArgs", @@ -21,7 +25,7 @@ load( ) load(":argsfiles.bzl", "CompileArgsfiles") load( - ":compile.bzl", + ":cxx_sources.bzl", "CxxSrcWithFlags", # @unused Used as a type ) load( @@ -30,7 +34,6 @@ load( ) load( ":link_groups.bzl", - "LinkGroupInfo", # @unused Used as a type "LinkGroupLibSpec", # @unused Used as a type ) load( @@ -47,6 +50,13 @@ load( "cxx_populate_xcode_attributes", ) +CxxLibraryInfo = provider( + fields = dict( + target = provider_field(Label), + labels = provider_field(list[str]), + ), +) + # Parameters to control which sub targets to define when processing Cxx rules. # By default, generates all subtargets. CxxRuleSubTargetParams = record( @@ -60,6 +70,7 @@ CxxRuleSubTargetParams = record( xcode_data = field(bool, True), objects = field(bool, True), bitcode_bundle = field(bool, True), + header_unit = field(bool, True), ) # Parameters to control which providers to define when processing Cxx rules. @@ -69,6 +80,7 @@ CxxRuleProviderParams = record( default = field(bool, True), java_packaging_info = field(bool, True), android_packageable_info = field(bool, True), + java_global_code_info = field(bool, True), linkable_graph = field(bool, True), link_style_outputs = field(bool, True), merged_native_link_info = field(bool, True), @@ -80,6 +92,7 @@ CxxRuleProviderParams = record( shared_libraries = field(bool, True), template_placeholders = field(bool, True), preprocessor_for_tests = field(bool, True), + third_party_build = field(bool, False), ) # Parameters to handle non-Clang sources, e.g Swift on Apple's platforms. 
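# The per-tool attributes deleted in this patch (_mk_hmap, _mk_comp_db,
# _dep_files_processor, _dist_lto_tools) are consolidated behind the single
# CxxInternalTools provider wired through `internal_tools` above. A sketch of
# the consuming side (the helper name is illustrative):
#
#     load("@prelude//cxx:cxx_context.bzl", "get_cxx_toolchain_info")
#
#     def _comp_db_tool(ctx: AnalysisContext) -> cmd_args:
#         tools = get_cxx_toolchain_info(ctx).internal_tools
#         return cmd_args(tools.make_comp_db)  # previously toolchain.mk_comp_db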
@@ -94,6 +107,8 @@ CxxRuleAdditionalParams = record(
subtargets = field(dict, {}), # [str: ["provider"]]
# Might be used to expose additional providers to cxx layer (e.g to support #headers subtarget for Swift)
additional_providers_factory = field([typing.Callable, None], None), # ([CPreprocessorInfo, None]) -> ["provider"]:
+ # The list of tags that should be applied to generated ArtifactTSet of debug information.
+ external_debug_info_tags = field(list[ArtifactInfoTag], []),
)
# Parameters that allows to configure/extend generic implementation of C++ rules.
@@ -104,15 +119,20 @@ CxxRuleAdditionalParams = record(
# different and need to be specified. The following record holds the data which
# is needed to specialize user-facing rule from generic implementation.
CxxRuleConstructorParams = record(
+ # Required
+
+ # Name of the top level rule utilizing the cxx rule.
+ rule_type = str,
+ # Header layout to use importing headers.
+ headers_layout = CxxHeadersLayout,
+
+ # Optional
+
# Whether to build an empty shared library. This is utilized for rust_python_extensions
# so that they can link against the rust shared object.
build_empty_so = field(bool, False),
- # Name of the top level rule utilizing the cxx rule.
- rule_type = str,
# If the rule is a test.
is_test = field(bool, False),
- # Header layout to use importing headers.
- headers_layout = CxxHeadersLayout,
# Additional information used to preprocess every unit of translation in the rule.
extra_preprocessors = field(list[CPreprocessor], []),
extra_preprocessors_info = field(list[CPreprocessorInfo], []),
@@ -151,6 +171,8 @@ CxxRuleConstructorParams = record(
soname = field([str, None], None),
# Optional argument to override the default name of the executable being produced.
executable_name = field([str, None], None),
+ # Optional argument to set the deffile for the Windows linker on a DLL
+ deffile = field([Artifact, None], None),
# If passed to cxx_executable, this field will be used to determine
# a shared subtarget's default output should be stripped.
strip_executable = field(bool, False),
@@ -175,7 +197,7 @@ CxxRuleConstructorParams = record(
# shared libs to include in the symlink tree).
extra_link_roots = field(list[LinkableProviders], []),
# Additional shared libs to "package".
- extra_shared_libs = field(dict[str, SharedLibrary], {}),
+ extra_shared_libs = field(list[SharedLibrary], []),
auto_link_group_specs = field([list[LinkGroupLibSpec], None], None),
link_group_info = field([LinkGroupInfo, None], None),
# Whether to use pre-stripped objects when linking.
@@ -189,4 +211,30 @@ CxxRuleConstructorParams = record(
extra_linker_outputs_factory = field(typing.Callable, lambda _context: ([], {})),
# Whether to allow cache uploads for locally-linked executables.
exe_allow_cache_upload = field(bool, False),
+ # Extra shared library interfaces to propagate, e.g. from mixed Swift libraries.
+ extra_shared_library_interfaces = field([list[Artifact], None], None), + # Compiler flags + compiler_flags = field(list[typing.Any], []), + lang_compiler_flags = field(dict[typing.Any, typing.Any], {}), + # Platform compiler flags + platform_compiler_flags = field(list[(str, typing.Any)], []), + lang_platform_compiler_flags = field(dict[typing.Any, typing.Any], {}), + # Preprocessor flags + preprocessor_flags = field(list[typing.Any], []), + lang_preprocessor_flags = field(dict[typing.Any, typing.Any], {}), + # Platform preprocessor flags + platform_preprocessor_flags = field(list[(str, typing.Any)], []), + lang_platform_preprocessor_flags = field(dict[typing.Any, typing.Any], {}), + # modulename-Swift.h header for building objc targets that rely on this swift dep + swift_objc_header = field([Artifact, None], None), + error_handler = field([typing.Callable, None], None), + index_store_factory = field(typing.Callable | None, None), + # Swift index stores to propagate + index_stores = field(list[Artifact] | None, None), + # Whether to add header units from dependencies to the command line. + use_header_units = field(bool, False), + # Whether to export a header unit to all dependents. + export_header_unit = field([str, None], None), + # Filter what headers to include in header units. + export_header_unit_filter = field(list[str], []), ) diff --git a/prelude/cxx/cxx_utility.bzl b/prelude/cxx/cxx_utility.bzl index a221419d8f7..0647e285f0e 100644 --- a/prelude/cxx/cxx_utility.bzl +++ b/prelude/cxx/cxx_utility.bzl @@ -24,6 +24,11 @@ def cxx_toolchain_allow_cache_upload_args(): default = None, doc = doc, ), + "cuda_compiler_allow_cache_upload": attrs.option( + attrs.bool(), + default = None, + doc = doc, + ), "cxx_compiler_allow_cache_upload": attrs.option( attrs.bool(), default = None, diff --git a/prelude/cxx/diagnostics.bzl b/prelude/cxx/diagnostics.bzl new file mode 100644 index 00000000000..a7166d4be16 --- /dev/null +++ b/prelude/cxx/diagnostics.bzl @@ -0,0 +1,37 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load("@prelude//utils:expect.bzl", "expect") +load(":cxx_context.bzl", "get_cxx_toolchain_info") + +def check_sub_target( + ctx: AnalysisContext, + diagnostics: dict[str, Artifact]) -> list[Provider]: + expect(len(diagnostics) > 0) + + if len(diagnostics) == 1: + all_diagnostics = diagnostics.values()[0] + else: + toolchain = get_cxx_toolchain_info(ctx) + concatenate_diagnostics_tool = toolchain.internal_tools.concatenate_diagnostics + all_diagnostics = ctx.actions.declare_output("diagnostics.txt") + ctx.actions.run( + [ + concatenate_diagnostics_tool, + cmd_args(all_diagnostics.as_output(), format = "--out={}"), + diagnostics.values(), + ], + category = "diagnostics", + ) + + return [DefaultInfo( + default_output = all_diagnostics, + sub_targets = { + short_path: [DefaultInfo(default_output = diagnostics)] + for short_path, diagnostics in diagnostics.items() + }, + )] diff --git a/prelude/cxx/dist_lto/README.md b/prelude/cxx/dist_lto/README.md index 88a4b80a759..d2b4f665bbf 100644 --- a/prelude/cxx/dist_lto/README.md +++ b/prelude/cxx/dist_lto/README.md @@ -249,7 +249,7 @@ contains a list of 7-tuples, whose members are: 1. The path to the source bitcode file. 
This is used as an index into a dictionary that records much of the metadata coming from these lines. -2. The path to an output file. `dist_lto_planner.py`is expected to place a +2. The path to an output file. `dist_lto_planner.py` is expected to place a ThinLTO index file at this location (suffixed `.thinlto.bc`). 3. The path to an output plan. This script is expected to place a link plan here (a JSON document indicating which other object files this) object file diff --git a/prelude/cxx/dist_lto/darwin_dist_lto.bzl b/prelude/cxx/dist_lto/darwin_dist_lto.bzl new file mode 100644 index 00000000000..6476c5e4703 --- /dev/null +++ b/prelude/cxx/dist_lto/darwin_dist_lto.bzl @@ -0,0 +1,597 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load( + "@prelude//:artifact_tset.bzl", + "ArtifactTSet", +) +load("@prelude//:paths.bzl", "paths") +load("@prelude//cxx:cxx_context.bzl", "get_cxx_toolchain_info") +load( + "@prelude//cxx:cxx_link_utility.bzl", + "cxx_link_cmd_parts", + "get_extra_darwin_linker_flags", + "linker_map_args", +) +load("@prelude//cxx:cxx_toolchain_types.bzl", "LinkerType") +load("@prelude//cxx:link_types.bzl", "LinkOptions") +load("@prelude//cxx:target_sdk_version.bzl", "get_target_sdk_version_flags") +load( + "@prelude//linking:link_info.bzl", + "ArchiveLinkable", + "FrameworksLinkable", # @unused Used as a type + "LinkInfo", + "LinkedObject", + "ObjectsLinkable", + "SharedLibLinkable", # @unused Used as a type + "SwiftRuntimeLinkable", # @unused Used as a type + "SwiftmoduleLinkable", # @unused Used as a type + "append_linkable_args", + "map_to_link_infos", +) +load("@prelude//linking:strip.bzl", "strip_object") +load("@prelude//utils:argfile.bzl", "at_argfile") +load("@prelude//utils:lazy.bzl", "lazy") + +_BitcodeLinkData = record( + name = str, + initial_object = Artifact, + bc_file = Artifact, + plan = Artifact, + opt_object = Artifact, +) + +_ArchiveLinkData = record( + name = str, + manifest = Artifact, + # A file containing paths to artifacts that are known to reside in opt_objects_dir. + opt_manifest = Artifact, + objects_dir = Artifact, + opt_objects_dir = Artifact, + indexes_dir = Artifact, + plan = Artifact, + link_whole = bool, +) + +_DynamicLibraryLinkData = record( + linkable = SharedLibLinkable, +) + +_DataType = enum( + "bitcode", + "archive", + "dynamic_library", +) + +_IndexLinkData = record( + data_type = _DataType, + link_data = field([_BitcodeLinkData, _ArchiveLinkData, _DynamicLibraryLinkData]), +) + +def cxx_darwin_dist_link( + ctx: AnalysisContext, + # The destination for the link output. + output: Artifact, + opts: LinkOptions, + linker_map: Artifact | None = None) -> LinkedObject: + """ + Perform a distributed thin-lto link into the supplied output + + Distributed thinlto splits the link into three stages: + 1. global "indexing" step + 2. many individual compilation unit optimization steps + 3. final global link step + + The 2nd and 3rd of those are done just by constructing compiler/linker commands (in dynamic_output + sections) using the output of the first. + + For the first, we need to post-process the linker index output to get it into a form + that is easy for us to consume from within bzl. 
+ """ + + links = opts.links + + # A category suffix that will be added to the category of the link action that is generated. + category_suffix = opts.category_suffix + + # An identifier that will uniquely name this link action in the context of a category. Useful for + # differentiating multiple link actions in the same rule. + identifier = opts.identifier + + def make_cat(c: str) -> str: + """ Used to make sure categories for our actions include the provided suffix """ + if category_suffix != None: + return c + "_" + category_suffix + return c + + def make_id(i: str) -> str: + """ Used to make sure identifiers for our actions include the provided identifier """ + if identifier != None: + return identifier + "_" + i + return i + + recorded_outputs = {} + + def name_for_obj(link_name: str, object_artifact: Artifact) -> str: + """ Creates a unique name/path we can use for a particular object file input """ + prefix = "{}/{}".format(link_name, object_artifact.short_path) + + # it's possible (though unlikely) that we can get duplicate name/short_path, so just uniquify them + if prefix in recorded_outputs: + recorded_outputs[prefix] += 1 + extra = recorded_outputs[prefix] + prefix = "{}-{}".format(prefix, extra) + else: + recorded_outputs[prefix] = 1 + return prefix + + names = {} + + def name_for_link(info: LinkInfo) -> str: + """ Creates a unique name for a LinkInfo that we are consuming """ + name = info.name or "unknown" + if name not in names: + names[name] = 1 + else: + names[name] += 1 + name += "-{}".format(names[name]) + return make_id(name) + + link_infos = map_to_link_infos(links) + + cxx_toolchain = get_cxx_toolchain_info(ctx) + lto_planner = cxx_toolchain.internal_tools.dist_lto.planner[LinkerType("darwin")] + lto_opt = cxx_toolchain.internal_tools.dist_lto.opt[LinkerType("darwin")] + lto_prepare = cxx_toolchain.internal_tools.dist_lto.prepare + lto_copy = cxx_toolchain.internal_tools.dist_lto.copy + + unsorted_index_link_data = [] + linker_flags = [] + common_link_flags = cmd_args(get_target_sdk_version_flags(ctx), get_extra_darwin_linker_flags()) + extra_codegen_flags = get_target_sdk_version_flags(ctx) + + # Information used to construct the dynamic plan: + plan_inputs = [] + plan_outputs = [] + + # Information used to construct the opt dynamic outputs: + archive_opt_manifests = [] + + prepare_cat = make_cat("thin_lto_prepare") + + for link in link_infos: + link_name = name_for_link(link) + + linker_flags.append(link.pre_flags) + linker_flags.append(link.post_flags) + + for linkable in link.linkables: + if isinstance(linkable, ObjectsLinkable): + for obj in linkable.objects: + name = name_for_obj(link_name, obj) + bc_output = ctx.actions.declare_output(name + ".thinlto.bc") + plan_output = ctx.actions.declare_output(name + ".opt.plan") + opt_output = ctx.actions.declare_output(name + ".opt.o") + + data = _IndexLinkData( + data_type = _DataType("bitcode"), + link_data = _BitcodeLinkData( + name = name, + initial_object = obj, + bc_file = bc_output, + plan = plan_output, + opt_object = opt_output, + ), + ) + unsorted_index_link_data.append(data) + plan_outputs.extend([bc_output.as_output(), plan_output.as_output()]) + elif isinstance(linkable, ArchiveLinkable): + # Our implementation of Distributed ThinLTO operates on individual objects, not archives. Since these + # archives might still contain LTO-able bitcode, we first extract the objects within the archive into + # another directory and write a "manifest" containing the list of objects that the archive contained. 
+ #
+ # Later actions in the LTO compilation pipeline will read this manifest and dynamically dispatch
+ # actions on the objects that the manifest reports.
+
+ name = name_for_obj(link_name, linkable.archive.artifact)
+ archive_manifest = ctx.actions.declare_output("%s/%s/manifest.json" % (prepare_cat, name))
+ archive_objects = ctx.actions.declare_output("%s/%s/objects" % (prepare_cat, name), dir = True)
+ archive_opt_objects = ctx.actions.declare_output("%s/%s/opt_objects" % (prepare_cat, name), dir = True)
+ archive_indexes = ctx.actions.declare_output("%s/%s/indexes" % (prepare_cat, name), dir = True)
+ archive_plan = ctx.actions.declare_output("%s/%s/plan.json" % (prepare_cat, name))
+ archive_opt_manifest = ctx.actions.declare_output("%s/%s/opt_objects.manifest" % (prepare_cat, name))
+ prepare_args = cmd_args([
+ lto_prepare,
+ "--manifest-out",
+ archive_manifest.as_output(),
+ "--objects-out",
+ archive_objects.as_output(),
+ "--ar",
+ cxx_toolchain.linker_info.archiver,
+ "--archive",
+ linkable.archive.artifact,
+ "--name",
+ name,
+ ])
+ ctx.actions.run(prepare_args, category = make_cat("thin_lto_prepare"), identifier = name)
+
+ data = _IndexLinkData(
+ data_type = _DataType("archive"),
+ link_data = _ArchiveLinkData(
+ name = name,
+ manifest = archive_manifest,
+ opt_manifest = archive_opt_manifest,
+ objects_dir = archive_objects,
+ opt_objects_dir = archive_opt_objects,
+ indexes_dir = archive_indexes,
+ plan = archive_plan,
+ link_whole = linkable.link_whole,
+ ),
+ )
+ unsorted_index_link_data.append(data)
+ archive_opt_manifests.append(archive_opt_manifest)
+ plan_inputs.extend([archive_manifest, archive_objects])
+ plan_outputs.extend([archive_indexes.as_output(), archive_plan.as_output()])
+ elif isinstance(linkable, SharedLibLinkable):
+ data = _IndexLinkData(
+ data_type = _DataType("dynamic_library"),
+ link_data = _DynamicLibraryLinkData(linkable = linkable),
+ )
+ unsorted_index_link_data.append(data)
+ elif isinstance(linkable, FrameworksLinkable) or isinstance(linkable, SwiftRuntimeLinkable) or isinstance(linkable, SwiftmoduleLinkable):
+ # These linkables are handled separately for flag deduplication purposes, as in append_linkable_args:
+ # https://www.internalfb.com/code/fbsource/[c6d2c820b394]/fbcode/buck2/prelude/linking/link_info.bzl?lines=271-278
+ pass
+ else:
+ fail("Unhandled linkable type: {}".format(str(linkable)))
+
+ def sort_index_link_data(input_list: list[_IndexLinkData]) -> list[_IndexLinkData]:
+ # Sort link data to reduce binary size. The idea is to encourage the linker to load the minimal number of object files possible.
+ # We load force-loaded archives first (since they will be loaded no matter what), then non-lazy object files (which will
+ # also be loaded no matter what), then shared libraries (to share as many symbols as possible), and finally regular archives.
+ force_loaded_archives = []
+ regular_archives = []
+ object_files = []
+ dynamic_libraries = []
+ for link_data in input_list:
+ if link_data.data_type == _DataType("bitcode"):
+ object_files.append(link_data)
+ elif link_data.data_type == _DataType("archive"):
+ if link_data.link_data.link_whole:
+ force_loaded_archives.append(link_data)
+ else:
+ regular_archives.append(link_data)
+ elif link_data.data_type == _DataType("dynamic_library"):
+ dynamic_libraries.append(link_data)
+
+ return force_loaded_archives + object_files + dynamic_libraries + regular_archives
+
+ sorted_index_link_data = sort_index_link_data(unsorted_index_link_data)
+
+ index_argsfile_out = ctx.actions.declare_output(output.basename + ".thinlto_index_argsfile")
+ final_link_index = ctx.actions.declare_output(output.basename + ".final_link_index")
+
+ def prepare_index_flags(include_inputs: bool, index_args_out: cmd_args, index_meta_args_out: cmd_args, ctx: AnalysisContext, artifacts, outputs):
+ for flag in linker_flags:
+ index_args_out.add(flag)
+
+ if include_inputs:
+ # buildifier: disable=uninitialized
+ for idx, artifact in enumerate(sorted_index_link_data):
+ link_data = artifact.link_data
+
+ if artifact.data_type == _DataType("bitcode"):
+ index_args_out.add(link_data.initial_object)
+ index_meta_args_out.add(link_data.initial_object, outputs[link_data.bc_file].as_output(), outputs[link_data.plan].as_output(), str(idx), "", "", "")
+
+ elif artifact.data_type == _DataType("archive"):
+ manifest = artifacts[link_data.manifest].read_json()
+
+ if not manifest["objects"]:
+ # Despite not having any objects (and thus not needing a plan), we still need to bind the plan output.
+ ctx.actions.write(outputs[link_data.plan].as_output(), "{}")
+ cmd = cmd_args(["/bin/sh", "-c", "mkdir", "-p", outputs[link_data.indexes_dir].as_output()])
+ ctx.actions.run(cmd, category = make_cat("thin_lto_mkdir"), identifier = link_data.name)
+ continue
+
+ index_args_out.add(cmd_args(hidden = link_data.objects_dir))
+
+ if not link_data.link_whole:
+ index_args_out.add("-Wl,--start-lib")
+
+ for obj in manifest["objects"]:
+ index_meta_args_out.add(obj, "", "", str(idx), link_data.name, outputs[link_data.plan].as_output(), outputs[link_data.indexes_dir].as_output())
+ index_args_out.add(obj)
+
+ if not link_data.link_whole:
+ index_args_out.add("-Wl,--end-lib")
+
+ elif artifact.data_type == _DataType("dynamic_library"):
+ append_linkable_args(index_args_out, link_data.linkable)
+
+ else:
+ fail("Unhandled data type: {}".format(str(artifact.data_type)))
+
+ output_as_string = cmd_args(output, ignore_artifacts = True)
+ index_args_out.add("-o", output_as_string)
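# The index_meta_args_out.add(...) calls above emit the 7-tuple rows consumed by
# dist_lto_planner.py (field meanings per the README excerpt earlier in this
# patch). Annotated for the loose-bitcode case:
#
#     index_meta_args_out.add(
#         link_data.initial_object,                # 1. source bitcode file
#         outputs[link_data.bc_file].as_output(),  # 2. output .thinlto.bc index
#         outputs[link_data.plan].as_output(),     # 3. output per-object link plan
#         str(idx),                                # 4. position in the sorted link line
#         "",                                      # 5-7. archive name, plan, and index
#         "",                                      #      directory; empty because this
#         "",                                      #      object is not in an archive
#     )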
+
+ # The flags used for the thin-link action. Unlike index_args, this does not include input files, and
+ # is only used for debugging and testing, and can be determined without dynamic output.
+ index_flags_for_debugging = cmd_args()
+ index_cmd_parts = cxx_link_cmd_parts(cxx_toolchain)
+ index_flags_for_debugging.add(index_cmd_parts.linker_flags)
+ index_flags_for_debugging.add(common_link_flags)
+ index_flags_for_debugging.add(index_cmd_parts.post_linker_flags)
+ prepare_index_flags(include_inputs = False, index_args_out = index_flags_for_debugging, index_meta_args_out = cmd_args(), ctx = ctx, artifacts = None, outputs = None)
+ index_flags_for_debugging_argsfile, _ = ctx.actions.write(output.basename + ".thinlto_index_debugging_argsfile", index_flags_for_debugging, allow_args = True)
+
+ def dynamic_plan(link_plan: Artifact, index_argsfile_out: Artifact, final_link_index: Artifact):
+ def plan(ctx: AnalysisContext, artifacts, outputs):
+ # index link command args
+ index_args = cmd_args()
+
+ # See comments in dist_lto_planner.py for semantics on the values that are pushed into index_meta.
+ index_meta = cmd_args()
+
+ prepare_index_flags(include_inputs = True, index_args_out = index_args, index_meta_args_out = index_meta, ctx = ctx, artifacts = artifacts, outputs = outputs)
+
+ index_argfile, _ = ctx.actions.write(
+ outputs[index_argsfile_out].as_output(),
+ index_args,
+ allow_args = True,
+ )
+
+ index_cat = make_cat("thin_lto_index")
+ index_file_out = ctx.actions.declare_output(make_id(index_cat) + "/index")
+ index_out_dir = cmd_args(index_file_out.as_output(), parent = 1)
+
+ index_cmd_parts = cxx_link_cmd_parts(cxx_toolchain)
+
+ index_cmd = index_cmd_parts.link_cmd
+ index_cmd.add(common_link_flags)
+ index_cmd.add(cmd_args(index_argfile, format = "@{}"))
+
+ index_cmd.add(cmd_args(index_file_out.as_output(), format = "-Wl,--thinlto-index-only={}"))
+ index_cmd.add("-Wl,--thinlto-emit-imports-files")
+ index_cmd.add("-Wl,--thinlto-full-index")
+ index_cmd.add(cmd_args(index_out_dir, format = "-Wl,--thinlto-prefix-replace=;{}/"))
+ index_cmd.add(index_cmd_parts.post_linker_flags)
+
+ # Terminate the index file with a newline.
+ index_meta.add("")
+ index_meta_file = ctx.actions.write(
+ output.basename + ".thinlto.meta",
+ index_meta,
+ )
+
+ plan_cmd = cmd_args([lto_planner, "--meta", index_meta_file, "--index", index_out_dir, "--link-plan", outputs[link_plan].as_output(), "--final-link-index", outputs[final_link_index].as_output(), "--"])
+ plan_cmd.add(index_cmd)
+
+ plan_cmd.add(cmd_args(hidden = [
+ index_meta,
+ index_args,
+ ]))
+
+ ctx.actions.run(plan_cmd, category = index_cat, identifier = identifier, local_only = True)
+
+ # TODO(T117513091) - dynamic_output does not allow for an empty list of dynamic inputs. If we have no archives
+ # to process, we will have no dynamic inputs, and the plan action can be non-dynamic.
+ #
+ # However, buck2 disallows `dynamic_output` with an empty input list. We also can't call our `plan` function
+ # directly, since it uses `ctx.outputs` to bind its outputs. Instead of doing Starlark hacks to work around
+ # the lack of `ctx.outputs`, we declare an empty file as a dynamic input.
+ plan_inputs.append(ctx.actions.write(output.basename + ".plan_hack.txt", ""))
+ plan_outputs.extend([link_plan.as_output(), index_argsfile_out.as_output(), final_link_index.as_output()])
+ ctx.actions.dynamic_output(dynamic = plan_inputs, inputs = [], outputs = plan_outputs, f = plan)
+
+ link_plan_out = ctx.actions.declare_output(output.basename + ".link-plan.json")
+ dynamic_plan(link_plan = link_plan_out, index_argsfile_out = index_argsfile_out, final_link_index = final_link_index)
+
+ def prepare_opt_flags(link_infos: list[LinkInfo]) -> cmd_args:
+ opt_flags = cmd_args(cxx_toolchain.linker_info.dist_thin_lto_codegen_flags)
+ opt_flags.add(extra_codegen_flags)
+ for link in link_infos:
+ opt_flags.add(link.dist_thin_lto_codegen_flags)
+ return opt_flags
+
+ common_opt_cmd = cmd_args(cxx_toolchain.linker_info.linker)
+ common_opt_cmd.add(prepare_opt_flags(link_infos))
+
+ # Create an argsfile and dump all the flags to be processed later by lto_opt.
+ # These flags are common to all opt actions, so we don't need an argfile for
+ # each action; one for the entire link unit will do.
+ opt_argsfile = ctx.actions.declare_output(output.basename + ".lto_opt_argsfile")
+ ctx.actions.write(opt_argsfile.as_output(), common_opt_cmd, allow_args = True)
+
+ # We don't want the linker itself in the argsfile for debugging / testing codegen flags.
+ opt_flags_for_debugging = prepare_opt_flags(link_infos)
+ opt_flags_for_debugging_argsfile = ctx.actions.declare_output(output.basename + ".thin_lto_codegen_debugging_argsfile")
+ ctx.actions.write(opt_flags_for_debugging_argsfile.as_output(), opt_flags_for_debugging, allow_args = True)
+
+ # We declare a separate dynamic_output for every object file. It would
+ # maybe be simpler to have a single dynamic_output that produced all the
+ # opt actions, but an action needs to re-run whenever the analysis that
+ # produced it re-runs. And so, with a single dynamic_output, we'd need to
+ # re-run all actions when any of the plans changed.
+ def dynamic_optimize(name: str, initial_object: Artifact, bc_file: Artifact, plan: Artifact, opt_object: Artifact):
+ def optimize_object(ctx: AnalysisContext, artifacts, outputs):
+ plan_json = artifacts[plan].read_json()
+
+ # If the object was not compiled with thinlto flags, then there
+ # won't be valid outputs for it from the indexing, but we still
+ # need to bind the artifact. Similarly, if a bitcode file is not
+ # loaded by the indexing phase, there is no point optimizing it.
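# For orientation, a plausible shape for the per-object plan JSON read below (the
# keys are those this file actually reads; the values are purely illustrative):
#
#     {
#         "is_bc": True,           # the input really was loaded ThinLTO bitcode
#         "imports": [3, 7],       # indices of loose objects it may inline from
#         "archive_imports": [5],  # indices of archives it may inline from
#     }
#
# A "not_loaded_by_linker" key marks inputs the thin link never loaded; for those
# the opt output is simply bound to an empty file.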
+ if "not_loaded_by_linker" in plan_json or not plan_json["is_bc"]: + ctx.actions.write(outputs[opt_object], "") + return + + opt_cmd = cmd_args(lto_opt) + opt_cmd.add("--out", outputs[opt_object].as_output()) + opt_cmd.add("--input", initial_object) + opt_cmd.add("--index", bc_file) + + opt_cmd.add(cmd_args(hidden = common_opt_cmd)) + opt_cmd.add("--args", opt_argsfile) + + opt_cmd.add("--") + opt_cmd.add(cxx_toolchain.cxx_compiler_info.compiler) + + imports = [sorted_index_link_data[idx].link_data.initial_object for idx in plan_json["imports"]] + archives = [sorted_index_link_data[idx].link_data.objects_dir for idx in plan_json["archive_imports"]] + opt_cmd.add(cmd_args(hidden = imports + archives)) + ctx.actions.run(opt_cmd, category = make_cat("thin_lto_opt_object"), identifier = name) + + ctx.actions.dynamic_output(dynamic = [plan], inputs = [], outputs = [opt_object.as_output()], f = optimize_object) + + def dynamic_optimize_archive(archive: _ArchiveLinkData): + def optimize_archive(ctx: AnalysisContext, artifacts, outputs): + plan_json = artifacts[archive.plan].read_json() + if "objects" not in plan_json or not plan_json["objects"] or lazy.is_all(lambda e: not e["is_bc"], plan_json["objects"]): + # Nothing in this directory was lto-able; let's just copy the archive. + ctx.actions.copy_file(outputs[archive.opt_objects_dir], archive.objects_dir) + ctx.actions.write(outputs[archive.opt_manifest], "") + return + + output_dir = {} + output_manifest = cmd_args() + for entry in plan_json["objects"]: + if "not_loaded_by_linker" in entry: + continue + + base_dir = plan_json["base_dir"] + source_path = paths.relativize(entry["path"], base_dir) + if not entry["is_bc"]: + opt_object = ctx.actions.declare_output("%s/%s" % (make_cat("thin_lto_opt_copy"), source_path)) + output_manifest.add(opt_object) + copy_cmd = cmd_args([ + lto_copy, + "--to", + opt_object.as_output(), + "--from", + entry["path"], + ], hidden = archive.objects_dir) + ctx.actions.run(copy_cmd, category = make_cat("thin_lto_opt_copy"), identifier = source_path) + output_dir[source_path] = opt_object + continue + + opt_object = ctx.actions.declare_output("%s/%s" % (make_cat("thin_lto_opt_archive"), source_path)) + output_manifest.add(opt_object) + output_dir[source_path] = opt_object + opt_cmd = cmd_args(lto_opt) + opt_cmd.add("--out", opt_object.as_output()) + opt_cmd.add("--input", entry["path"]) + opt_cmd.add("--index", entry["bitcode_file"]) + + opt_cmd.add(cmd_args(hidden = common_opt_cmd)) + opt_cmd.add("--args", opt_argsfile) + + opt_cmd.add("--") + opt_cmd.add(cxx_toolchain.cxx_compiler_info.compiler) + + imports = [sorted_index_link_data[idx].link_data.initial_object for idx in entry["imports"]] + archives = [sorted_index_link_data[idx].link_data.objects_dir for idx in entry["archive_imports"]] + opt_cmd.add(cmd_args( + hidden = imports + archives + [archive.indexes_dir, archive.objects_dir], + )) + ctx.actions.run(opt_cmd, category = make_cat("thin_lto_opt_archive"), identifier = source_path) + + ctx.actions.symlinked_dir(outputs[archive.opt_objects_dir], output_dir) + ctx.actions.write(outputs[archive.opt_manifest], output_manifest, allow_args = True) + + archive_opt_inputs = [archive.plan] + archive_opt_outputs = [archive.opt_objects_dir.as_output(), archive.opt_manifest.as_output()] + ctx.actions.dynamic_output(dynamic = archive_opt_inputs, inputs = [], outputs = archive_opt_outputs, f = optimize_archive) + + for artifact in sorted_index_link_data: + link_data = artifact.link_data + if artifact.data_type == 
_DataType("bitcode"): + dynamic_optimize( + name = link_data.name, + initial_object = link_data.initial_object, + bc_file = link_data.bc_file, + plan = link_data.plan, + opt_object = link_data.opt_object, + ) + elif artifact.data_type == _DataType("archive"): + dynamic_optimize_archive(link_data) + + linker_argsfile_out = ctx.actions.declare_output(output.basename + ".thinlto_link_argsfile") + + def thin_lto_final_link(ctx: AnalysisContext, artifacts, outputs): + plan = artifacts[link_plan_out].read_json() + link_args = cmd_args() + plan_index = {int(k): v for k, v in plan["index"].items()} + + # non_lto_objects are the ones that weren't compiled with thinlto + # flags. In that case, we need to link against the original object. + non_lto_objects = {int(k): 1 for k in plan["non_lto_objects"]} + opt_objects = [] + for flag in linker_flags: + link_args.add(flag) + + for idx, artifact in enumerate(sorted_index_link_data): + if artifact.data_type == _DataType("dynamic_library"): + append_linkable_args(link_args, artifact.link_data.linkable) + elif artifact.data_type == _DataType("bitcode"): + if idx in plan_index: + opt_objects.append(artifact.link_data.opt_object) + elif idx in non_lto_objects: + opt_objects.append(artifact.link_data.initial_object) + + link_cmd_parts = cxx_link_cmd_parts(cxx_toolchain) + link_cmd = link_cmd_parts.link_cmd + link_cmd.add(common_link_flags) + link_cmd_hidden = [] + + # buildifier: disable=uninitialized + for artifact in sorted_index_link_data: + if artifact.data_type == _DataType("archive"): + link_cmd_hidden.append(artifact.link_data.opt_objects_dir) + link_cmd.add(at_argfile( + actions = ctx.actions, + name = outputs[linker_argsfile_out], + args = link_args, + allow_args = True, + )) + link_cmd.add(cmd_args(final_link_index, format = "@{}")) + link_cmd.add("-o", outputs[output].as_output()) + if linker_map: + link_cmd.add(linker_map_args(cxx_toolchain, outputs[linker_map].as_output()).flags) + link_cmd_hidden.extend([ + link_args, + opt_objects, + ]) + link_cmd.add(link_cmd_parts.post_linker_flags) + link_cmd.add(cmd_args(hidden = link_cmd_hidden)) + + ctx.actions.run(link_cmd, category = make_cat("thin_lto_link"), identifier = identifier, local_only = True) + + final_link_inputs = [link_plan_out, final_link_index] + archive_opt_manifests + ctx.actions.dynamic_output( + dynamic = final_link_inputs, + inputs = [], + outputs = [output.as_output()] + ([linker_map.as_output()] if linker_map else []) + [linker_argsfile_out.as_output()], + f = thin_lto_final_link, + ) + + final_output = output + unstripped_output = output + if opts.strip: + strip_args = opts.strip_args_factory(ctx) if opts.strip_args_factory else cmd_args() + final_output = strip_object(ctx, cxx_toolchain, final_output, strip_args, category_suffix) + + return LinkedObject( + output = final_output, + unstripped_output = unstripped_output, + prebolt_output = output, + dwp = None, + external_debug_info = ArtifactTSet(), + linker_argsfile = linker_argsfile_out, + linker_filelist = None, # DistLTO doesn't use filelists + linker_command = None, # There is no notion of a single linker command for DistLTO + index_argsfile = index_argsfile_out, + dist_thin_lto_codegen_argsfile = opt_flags_for_debugging_argsfile, + dist_thin_lto_index_argsfile = index_flags_for_debugging_argsfile, + ) diff --git a/prelude/cxx/dist_lto/dist_lto.bzl b/prelude/cxx/dist_lto/dist_lto.bzl index 1c49f435a00..d1d4cef55aa 100644 --- a/prelude/cxx/dist_lto/dist_lto.bzl +++ b/prelude/cxx/dist_lto/dist_lto.bzl @@ -16,22 +16,23 @@ load( 
"bolt", "cxx_use_bolt", ) +load("@prelude//cxx:cxx_context.bzl", "get_cxx_toolchain_info") load( "@prelude//cxx:cxx_link_utility.bzl", "cxx_link_cmd_parts", "linker_map_args", ) -load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo") +load("@prelude//cxx:cxx_toolchain_types.bzl", "LinkerType") load("@prelude//cxx:debug.bzl", "SplitDebugMode") load( "@prelude//cxx:dwp.bzl", "run_dwp_action", ) +load("@prelude//cxx:link_types.bzl", "LinkOptions") load( "@prelude//linking:link_info.bzl", "ArchiveLinkable", "FrameworksLinkable", # @unused Used as a type - "LinkArgs", "LinkInfo", "LinkedObject", "ObjectsLinkable", @@ -40,6 +41,9 @@ load( "map_to_link_infos", "unpack_external_debug_info", ) +load("@prelude//linking:stamp_build_info.bzl", "stamp_build_info") +load("@prelude//linking:strip.bzl", "strip_object") +load("@prelude//utils:argfile.bzl", "at_argfile") load("@prelude//utils:lazy.bzl", "lazy") _BitcodeLinkData = record( @@ -79,17 +83,12 @@ _PrePostFlags = record( post_flags = list, ) -def cxx_dist_link( +def cxx_gnu_dist_link( ctx: AnalysisContext, - links: list[LinkArgs], # The destination for the link output. output: Artifact, - linker_map: [Artifact, None] = None, - # A category suffix that will be added to the category of the link action that is generated. - category_suffix: [str, None] = None, - # An identifier that will uniquely name this link action in the context of a category. Useful for - # differentiating multiple link actions in the same rule. - identifier: [str, None] = None, + opts: LinkOptions, + linker_map: Artifact | None = None, # This action will only happen if split_dwarf is enabled via the toolchain. generate_dwp: bool = True, executable_link: bool = True) -> LinkedObject: @@ -108,6 +107,15 @@ def cxx_dist_link( that is easy for us to consume from within bzl. """ + links = opts.links + + # A category suffix that will be added to the category of the link action that is generated. + category_suffix = opts.category_suffix + + # An identifier that will uniquely name this link action in the context of a category. Useful for + # differentiating multiple link actions in the same rule. + identifier = opts.identifier + def make_cat(c: str) -> str: """ Used to make sure categories for our actions include the provided suffix """ if category_suffix != None: @@ -149,11 +157,11 @@ def cxx_dist_link( link_infos = map_to_link_infos(links) - cxx_toolchain = ctx.attrs._cxx_toolchain[CxxToolchainInfo] - lto_planner = cxx_toolchain.dist_lto_tools_info.planner - lto_opt = cxx_toolchain.dist_lto_tools_info.opt - lto_prepare = cxx_toolchain.dist_lto_tools_info.prepare - lto_copy = cxx_toolchain.dist_lto_tools_info.copy + cxx_toolchain = get_cxx_toolchain_info(ctx) + lto_planner = cxx_toolchain.internal_tools.dist_lto.planner[LinkerType("gnu")] + lto_opt = cxx_toolchain.internal_tools.dist_lto.opt[LinkerType("gnu")] + lto_prepare = cxx_toolchain.internal_tools.dist_lto.prepare + lto_copy = cxx_toolchain.internal_tools.dist_lto.copy PREPEND_ARCHIVE_NAMES = [ # T130644072: If linked with `--whole-archive`, Clang builtins must be at the @@ -227,7 +235,7 @@ def cxx_dist_link( ), ) index_link_data.append(data) - plan_outputs.extend([bc_output, plan_output]) + plan_outputs.extend([bc_output.as_output(), plan_output.as_output()]) elif isinstance(linkable, ArchiveLinkable) and linkable.supports_lto: # Our implementation of Distributed ThinLTO operates on individual objects, not archives. 
Since these # archives might still contain LTO-able bitcode, we first extract the objects within the archive into @@ -275,12 +283,12 @@ def cxx_dist_link( index_link_data.append(data) archive_opt_manifests.append(archive_opt_manifest) plan_inputs.extend([archive_manifest, archive_objects]) - plan_outputs.extend([archive_indexes, archive_plan]) + plan_outputs.extend([archive_indexes.as_output(), archive_plan.as_output()]) else: add_linkable(idx, linkable) index_link_data.append(None) - index_argsfile_out = ctx.actions.declare_output(output.basename + ".thinlto.index.argsfile") + index_argsfile_out = ctx.actions.declare_output(output.basename + ".thinlto_index_argsfile") final_link_index = ctx.actions.declare_output(output.basename + ".final_link_index") def dynamic_plan(link_plan: Artifact, index_argsfile_out: Artifact, final_link_index: Artifact): @@ -335,7 +343,7 @@ def cxx_dist_link( archive_args = prepend_index_args if link_data.prepend else index_args - archive_args.hidden(link_data.objects_dir) + archive_args.add(cmd_args(hidden = link_data.objects_dir)) if not link_data.link_whole: archive_args.add("-Wl,--start-lib") @@ -347,8 +355,6 @@ def cxx_dist_link( if not link_data.link_whole: archive_args.add("-Wl,--end-lib") - archive_args.hidden(link_data.objects_dir) - add_post_flags(idx) index_argfile, _ = ctx.actions.write( @@ -359,15 +365,14 @@ def cxx_dist_link( index_cat = make_cat("thin_lto_index") index_file_out = ctx.actions.declare_output(make_id(index_cat) + "/index") - index_out_dir = cmd_args(index_file_out.as_output()).parent() + index_out_dir = cmd_args(index_file_out.as_output(), parent = 1) index_cmd_parts = cxx_link_cmd_parts(cxx_toolchain) index_cmd = index_cmd_parts.link_cmd index_cmd.add(cmd_args(index_argfile, format = "@{}")) - output_as_string = cmd_args(output) - output_as_string.ignore_artifacts() + output_as_string = cmd_args(output, ignore_artifacts = True) index_cmd.add("-o", output_as_string) index_cmd.add(cmd_args(index_file_out.as_output(), format = "-Wl,--thinlto-index-only={}")) index_cmd.add("-Wl,--thinlto-emit-imports-files") @@ -385,10 +390,10 @@ def cxx_dist_link( plan_cmd = cmd_args([lto_planner, "--meta", index_meta_file, "--index", index_out_dir, "--link-plan", outputs[link_plan].as_output(), "--final-link-index", outputs[final_link_index].as_output(), "--"]) plan_cmd.add(index_cmd) - plan_extra_inputs = cmd_args() - plan_extra_inputs.add(index_meta) - plan_extra_inputs.add(index_args) - plan_cmd.hidden(plan_extra_inputs) + plan_cmd.add(cmd_args(hidden = [ + index_meta, + index_args, + ])) ctx.actions.run(plan_cmd, category = index_cat, identifier = identifier, local_only = True) @@ -399,7 +404,7 @@ def cxx_dist_link( # directly, since it uses `ctx.outputs` to bind its outputs. Instead of doing Starlark hacks to work around # the lack of `ctx.outputs`, we declare an empty file as a dynamic input. plan_inputs.append(ctx.actions.write(output.basename + ".plan_hack.txt", "")) - plan_outputs.extend([link_plan, index_argsfile_out, final_link_index]) + plan_outputs.extend([link_plan.as_output(), index_argsfile_out.as_output(), final_link_index.as_output()]) ctx.actions.dynamic_output(dynamic = plan_inputs, inputs = [], outputs = plan_outputs, f = plan) link_plan_out = ctx.actions.declare_output(output.basename + ".link-plan.json") @@ -419,6 +424,12 @@ def cxx_dist_link( opt_common_flags = prepare_opt_flags(link_infos) + # Create an argsfile and dump all the flags to be processed later by lto_opt. 
+    # These flags are common to all opt actions, so we don't need an argsfile for each action; one
+    # for the entire link unit will do.
+    opt_argsfile = ctx.actions.declare_output(output.basename + ".lto_opt_argsfile")
+    ctx.actions.write(opt_argsfile.as_output(), opt_common_flags, allow_args = True)
+
     # We declare a separate dynamic_output for every object file. It would
     # maybe be simpler to have a single dynamic_output that produced all the
     # opt actions, but an action needs to re-run whenever the analysis that
@@ -453,10 +464,7 @@ def cxx_dist_link(
             elif cxx_toolchain.split_debug_mode == SplitDebugMode("single"):
                 opt_cmd.add("--split-dwarf=single")

-            # Create an argsfile and dump all the flags to be processed later.
-            opt_argsfile = ctx.actions.declare_output(outputs[opt_object].basename + ".opt.argsfile")
-            ctx.actions.write(opt_argsfile.as_output(), opt_common_flags, allow_args = True)
-            opt_cmd.hidden(opt_common_flags)
+            opt_cmd.add(cmd_args(hidden = opt_common_flags))
             opt_cmd.add("--args", opt_argsfile)

             opt_cmd.add("--")
@@ -464,11 +472,10 @@ def cxx_dist_link(
             imports = [index_link_data[idx].link_data.initial_object for idx in plan_json["imports"]]
             archives = [index_link_data[idx].link_data.objects_dir for idx in plan_json["archive_imports"]]
-            opt_cmd.hidden(imports)
-            opt_cmd.hidden(archives)
+            opt_cmd.add(cmd_args(hidden = imports + archives))
             ctx.actions.run(opt_cmd, category = make_cat("thin_lto_opt_object"), identifier = name)

-        ctx.actions.dynamic_output(dynamic = [plan], inputs = [], outputs = [opt_object], f = optimize_object)
+        ctx.actions.dynamic_output(dynamic = [plan], inputs = [], outputs = [opt_object.as_output()], f = optimize_object)

     def dynamic_optimize_archive(archive: _ArchiveLinkData):
         def optimize_archive(ctx: AnalysisContext, artifacts, outputs):
@@ -493,9 +500,7 @@ def cxx_dist_link(
                         opt_object.as_output(),
                         "--from",
                         entry["path"],
-                    ])
-
-                    copy_cmd.hidden(archive.objects_dir)
+                    ], hidden = archive.objects_dir)
                     ctx.actions.run(copy_cmd, category = make_cat("thin_lto_opt_copy"), identifier = source_path)
                     output_dir[source_path] = opt_object
                     continue
@@ -508,14 +513,12 @@ def cxx_dist_link(
                 opt_cmd.add("--input", entry["path"])
                 opt_cmd.add("--index", entry["bitcode_file"])

-                if cxx_toolchain.split_debug_mode == SplitDebugMode("none") or ctx.attrs.distributed_thinlto_partial_split_dwarf:
+                if cxx_toolchain.split_debug_mode == SplitDebugMode("none") or getattr(ctx.attrs, "distributed_thinlto_partial_split_dwarf", False):
                     opt_cmd.add("--split-dwarf=none")
                 elif cxx_toolchain.split_debug_mode == SplitDebugMode("single"):
                     opt_cmd.add("--split-dwarf=single")

-                opt_argsfile = ctx.actions.declare_output(opt_object.basename + ".opt.argsfile")
-                ctx.actions.write(opt_argsfile.as_output(), opt_common_flags, allow_args = True)
-                opt_cmd.hidden(opt_common_flags)
+                opt_cmd.add(cmd_args(hidden = opt_common_flags))
                 opt_cmd.add("--args", opt_argsfile)

                 opt_cmd.add("--")
@@ -523,17 +526,16 @@ def cxx_dist_link(
                 imports = [index_link_data[idx].link_data.initial_object for idx in entry["imports"]]
                 archives = [index_link_data[idx].link_data.objects_dir for idx in entry["archive_imports"]]
-                opt_cmd.hidden(imports)
-                opt_cmd.hidden(archives)
-                opt_cmd.hidden(archive.indexes_dir)
-                opt_cmd.hidden(archive.objects_dir)
+                opt_cmd.add(cmd_args(
+                    hidden = imports + archives + [archive.indexes_dir, archive.objects_dir],
+                ))
                 ctx.actions.run(opt_cmd, category = make_cat("thin_lto_opt_archive"), identifier = source_path)

             ctx.actions.symlinked_dir(outputs[archive.opt_objects_dir], output_dir)
ctx.actions.write(outputs[archive.opt_manifest], output_manifest, allow_args = True) archive_opt_inputs = [archive.plan] - archive_opt_outputs = [archive.opt_objects_dir, archive.opt_manifest] + archive_opt_outputs = [archive.opt_objects_dir.as_output(), archive.opt_manifest.as_output()] ctx.actions.dynamic_output(dynamic = archive_opt_inputs, inputs = [], outputs = archive_opt_outputs, f = optimize_archive) for artifact in index_link_data: @@ -551,7 +553,7 @@ def cxx_dist_link( elif artifact.data_type == _DataType("archive"): dynamic_optimize_archive(link_data) - linker_argsfile_out = ctx.actions.declare_output(output.basename + ".thinlto.link.argsfile") + linker_argsfile_out = ctx.actions.declare_output(output.basename + ".thinlto_link_argsfile") def thin_lto_final_link(ctx: AnalysisContext, artifacts, outputs): plan = artifacts[link_plan_out].read_json() @@ -563,18 +565,14 @@ def cxx_dist_link( non_lto_objects = {int(k): 1 for k in plan["non_lto_objects"]} current_index = 0 opt_objects = [] - archives = [] for link in link_infos: link_args.add(link.pre_flags) for linkable in link.linkables: if isinstance(linkable, ObjectsLinkable): - new_objs = [] for obj in linkable.objects: if current_index in plan_index: - new_objs.append(index_link_data[current_index].link_data.opt_object) opt_objects.append(index_link_data[current_index].link_data.opt_object) elif current_index in non_lto_objects: - new_objs.append(obj) opt_objects.append(obj) current_index += 1 else: @@ -583,28 +581,28 @@ def cxx_dist_link( link_cmd_parts = cxx_link_cmd_parts(cxx_toolchain) link_cmd = link_cmd_parts.link_cmd - final_link_argfile, final_link_inputs = ctx.actions.write( - outputs[linker_argsfile_out].as_output(), - link_args, - allow_args = True, - ) + link_cmd_hidden = [] # buildifier: disable=uninitialized for artifact in index_link_data: if artifact != None and artifact.data_type == _DataType("archive"): - link_cmd.hidden(artifact.link_data.opt_objects_dir) - link_cmd.add(cmd_args(final_link_argfile, format = "@{}")) + link_cmd_hidden.append(artifact.link_data.opt_objects_dir) + link_cmd.add(at_argfile( + actions = ctx.actions, + name = outputs[linker_argsfile_out], + args = link_args, + allow_args = True, + )) link_cmd.add(cmd_args(final_link_index, format = "@{}")) link_cmd.add("-o", outputs[output].as_output()) if linker_map: link_cmd.add(linker_map_args(cxx_toolchain, outputs[linker_map].as_output()).flags) - link_cmd_extra_inputs = cmd_args() - link_cmd_extra_inputs.add(final_link_inputs) - link_cmd.hidden(link_cmd_extra_inputs) - link_cmd.hidden(link_args) - link_cmd.hidden(opt_objects) - link_cmd.hidden(archives) + link_cmd_hidden.extend([ + link_args, + opt_objects, + ]) link_cmd.add(link_cmd_parts.post_linker_flags) + link_cmd.add(cmd_args(hidden = link_cmd_hidden)) ctx.actions.run(link_cmd, category = make_cat("thin_lto_link"), identifier = identifier, local_only = True) @@ -612,7 +610,7 @@ def cxx_dist_link( ctx.actions.dynamic_output( dynamic = final_link_inputs, inputs = [], - outputs = [output] + ([linker_map] if linker_map else []) + [linker_argsfile_out], + outputs = [output.as_output()] + ([linker_map.as_output()] if linker_map else []) + [linker_argsfile_out.as_output()], f = thin_lto_final_link, ) @@ -624,7 +622,9 @@ def cxx_dist_link( ], ) - final_output = output if not (executable_link and cxx_use_bolt(ctx)) else bolt(ctx, output, identifier) + final_output = output if not (executable_link and cxx_use_bolt(ctx)) else bolt(ctx, output, external_debug_info, identifier) + final_output = 
stamp_build_info(ctx, final_output) if executable_link else final_output
+
     dwp_output = ctx.actions.declare_output(output.short_path.removesuffix("-wrapper") + ".dwp") if generate_dwp else None

     if generate_dwp:
@@ -643,14 +643,21 @@ def cxx_dist_link(
             local_only = True,
         )

+    unstripped_output = final_output
+    if opts.strip:
+        strip_args = opts.strip_args_factory(ctx) if opts.strip_args_factory else cmd_args()
+        final_output = strip_object(ctx, cxx_toolchain, final_output, strip_args, category_suffix)
+
     return LinkedObject(
         output = final_output,
-        unstripped_output = final_output,
+        unstripped_output = unstripped_output,
         prebolt_output = output,
         dwp = dwp_output,
         external_debug_info = external_debug_info,
         linker_argsfile = linker_argsfile_out,
-        linker_filelist = None,  # DistLTO unsupported for Darwin linkers
-        linker_command = None,  # DistLTO unsupported for debugging of command
+        linker_filelist = None,  # DistLTO doesn't use filelists
+        linker_command = None,  # There is no notion of a single linker command for DistLTO
         index_argsfile = index_argsfile_out,
+        dist_thin_lto_codegen_argsfile = None,  # Only Darwin builds provide this argsfile
+        dist_thin_lto_index_argsfile = None,  # Only Darwin builds provide this argsfile
     )
diff --git a/prelude/cxx/dist_lto/tools.bzl b/prelude/cxx/dist_lto/tools.bzl
index 604ccb37e17..7adb34c4a6c 100644
--- a/prelude/cxx/dist_lto/tools.bzl
+++ b/prelude/cxx/dist_lto/tools.bzl
@@ -5,15 +5,21 @@
 # License, Version 2.0 found in the LICENSE-APACHE file in the root directory
 # of this source tree.

-load("@prelude//cxx:cxx_toolchain_types.bzl", "DistLtoToolsInfo")
+load("@prelude//cxx:cxx_toolchain_types.bzl", "DistLtoToolsInfo", "LinkerType")

 def _impl(ctx):
     return [
         DefaultInfo(),
         DistLtoToolsInfo(
-            planner = ctx.attrs.planner[RunInfo],
+            planner = {
+                LinkerType(linker_type): planner[RunInfo]
+                for linker_type, planner in ctx.attrs.planner.items()
+            },
+            opt = {
+                LinkerType(linker_type): opt[RunInfo]
+                for linker_type, opt in ctx.attrs.opt.items()
+            },
             prepare = ctx.attrs.prepare[RunInfo],
-            opt = ctx.attrs.opt[RunInfo],
             copy = ctx.attrs.copy[RunInfo],
         ),
     ]
@@ -21,9 +27,15 @@ def _impl(ctx):
 dist_lto_tools = rule(
     impl = _impl,
     attrs = {
-        "copy": attrs.dep(),
-        "opt": attrs.dep(),
-        "planner": attrs.dep(),
-        "prepare": attrs.dep(),
+        "copy": attrs.dep(providers = [RunInfo]),
+        "opt": attrs.dict(
+            key = attrs.enum(LinkerType.values()),
+            value = attrs.dep(providers = [RunInfo]),
+        ),
+        "planner": attrs.dict(
+            key = attrs.enum(LinkerType.values()),
+            value = attrs.dep(providers = [RunInfo]),
+        ),
+        "prepare": attrs.dep(providers = [RunInfo]),
     },
 )
diff --git a/prelude/cxx/dist_lto/tools/BUCK.v2 b/prelude/cxx/dist_lto/tools/BUCK.v2
index 3abffa28230..9e1b5d325e8 100644
--- a/prelude/cxx/dist_lto/tools/BUCK.v2
+++ b/prelude/cxx/dist_lto/tools/BUCK.v2
@@ -1,16 +1,33 @@
 load("@prelude//cxx/dist_lto:tools.bzl", "dist_lto_tools")
+load("@prelude//utils:source_listing.bzl", "source_listing")
+
+oncall("build_infra")
+
+source_listing()

 prelude = native

 prelude.python_bootstrap_binary(
-    name = "dist_lto_planner",
-    main = "dist_lto_planner.py",
+    name = "dist_lto_planner_gnu",
+    main = "dist_lto_planner_gnu.py",
+    visibility = ["PUBLIC"],
+)
+
+prelude.python_bootstrap_binary(
+    name = "dist_lto_planner_darwin",
+    main = "dist_lto_planner_darwin.py",
     visibility = ["PUBLIC"],
 )

 prelude.python_bootstrap_binary(
-    name = "dist_lto_opt",
-    main = "dist_lto_opt.py",
+    name = "dist_lto_opt_gnu",
+    main = "dist_lto_opt_gnu.py",
+    visibility = ["PUBLIC"],
+)
+
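+# NOTE (editor's illustration, hypothetical consumer): with `planner` and `opt`
+# now dict attrs keyed by linker type (see tools.bzl above), a toolchain
+# consumer resolves a concrete tool as, e.g.,
+# `internal_tools.dist_lto.opt[LinkerType("darwin")]`, mirroring the gnu
+# lookups in dist_lto.bzl.
+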
+prelude.python_bootstrap_binary(
+    name = "dist_lto_opt_darwin",
+    main = "dist_lto_opt_darwin.py",
     visibility = ["PUBLIC"],
 )

@@ -28,17 +45,23 @@ prelude.python_bootstrap_binary(

 dist_lto_tools(
     name = "dist_lto_tools",
-    planner = ":dist_lto_planner",
-    opt = ":dist_lto_opt",
-    prepare = ":dist_lto_prepare",
     copy = ":dist_lto_copy",
+    opt = {
+        "darwin": ":dist_lto_opt_darwin",
+        "gnu": ":dist_lto_opt_gnu",
+    },
+    planner = {
+        "darwin": ":dist_lto_planner_darwin",
+        "gnu": ":dist_lto_planner_gnu",
+    },
+    prepare = ":dist_lto_prepare",
     visibility = ["PUBLIC"],
 )

 prelude.python_test(
     name = "test_dist_lto_opt",
     srcs = [
+        "dist_lto_opt_gnu.py",
         "tests/test_dist_lto_opt.py",
-        "dist_lto_opt.py",
     ],
 )
diff --git a/prelude/cxx/dist_lto/tools/dist_lto_opt_darwin.py b/prelude/cxx/dist_lto/tools/dist_lto_opt_darwin.py
new file mode 100644
index 00000000000..3af21826fdb
--- /dev/null
+++ b/prelude/cxx/dist_lto/tools/dist_lto_opt_darwin.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python3
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+#
+# This source code is licensed under both the MIT license found in the
+# LICENSE-MIT file in the root directory of this source tree and the Apache
+# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
+# of this source tree.
+
+"""
+Python wrapper around clang intended to optimize and codegen bitcode files
+to native object files for distributed thin lto. This script munges compiler
+flags to prepare a suitable clang invocation.
+"""
+
+import argparse
+import subprocess
+import sys
+
+from typing import List
+
+
+def main(argv: List[str]) -> int:
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--out", help="The output native object file.")
+    parser.add_argument("--input", help="The input bitcode object file.")
+    parser.add_argument("--index", help="The thinlto index file.")
+    # Split dwarf isn't applicable to Darwin, ignore the flag
+    parser.add_argument("--split-dwarf", required=False, help="Split dwarf option.")
+    parser.add_argument(
+        "--args", help="The argsfile containing unfiltered and unprocessed flags."
+    )
+    parser.add_argument("opt_args", nargs=argparse.REMAINDER)
+    args = parser.parse_args(argv[1:])
+
+    with open(args.args, "r") as argsfile:
+        clang_opt_flags = argsfile.read().splitlines()
+
+    clang_opt_flags.extend(
+        [
+            "-o",
+            args.out,
+            "-x",
+            "ir",  # Without this the input file type is incorrectly inferred.
+            "-c",
+            args.input,
+            f"-fthinlto-index={args.index}",
+            # When lto_mode=thin/full all compile actions are passed `-flto=thin/full`. We
+            # want to generate a native object file here.
+            "-fno-lto",
+            "-Werror=unused-command-line-argument",
+        ]
+    )
+
+    # TODO(T187767988) - Check if returning the subprocess's exit code is sufficient. Server LLVM created such a wrapper
+    # script in the first place because of a bug in Clang where it fails but does not set a non-zero exit code (T116695431). Fbcode's
+    # version of this script measures the size of the output file to determine success. The task is closed, but if the bug
+    # still persists, we may need to do the same.
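+    #
+    # Editor's sketch (hypothetical, not asserted by this patch): if the Clang
+    # bug above does resurface, an fbcode-style size check could be layered on
+    # top, roughly as below (this would also require `import os` at the top):
+    #
+    #     result = subprocess.run(clang_opt_flags)
+    #     if result.returncode == 0 and os.path.getsize(args.out) == 0:
+    #         return 1  # empty output despite exit code 0: treat as failure
+    #     return result.returncode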
+    result = subprocess.run(clang_opt_flags)
+    return result.returncode
+
+
+if __name__ == "__main__":
+    sys.exit(main(sys.argv))
diff --git a/prelude/cxx/dist_lto/tools/dist_lto_opt.py b/prelude/cxx/dist_lto/tools/dist_lto_opt_gnu.py
similarity index 99%
rename from prelude/cxx/dist_lto/tools/dist_lto_opt.py
rename to prelude/cxx/dist_lto/tools/dist_lto_opt_gnu.py
index 2fcad7f4eb7..e6d34d3b6fd 100644
--- a/prelude/cxx/dist_lto/tools/dist_lto_opt.py
+++ b/prelude/cxx/dist_lto/tools/dist_lto_opt_gnu.py
@@ -237,7 +237,7 @@ def main(argv: List[str]) -> int:
     # 1. a spliter "--", it's not used anywhere;
     # 2. the fbcc wrapper script path
     # 3. the "-cc" arg pointing to the compiler we use
-    # EXAMPLE: ['--', 'buck-out/v2/gen/fbcode/8e3db19fe005003a/tools/build/buck/wrappers/__fbcc__/fbcc', '--cc=fbcode/third-party-buck/platform010/build/llvm-fb/12/bin/clang++', '--target=x86_64-redhat-linux-gnu', ...]
+    # EXAMPLE: ['--', 'buck-out/v2/gen/fbcode/8e3db19fe005003a/tools/build/buck/wrappers/__fbcc__/fbcc', '--cc=fbcode/third-party-buck/platform010/build/llvm-fb//bin/clang++', '--target=x86_64-redhat-linux-gnu', ...]
     clang_cc1_flags = _cleanup_flags(args.opt_args[2:] + clang_opt_flags)
     if clang_cc1_flags is None:
         return EXIT_FAILURE
diff --git a/prelude/cxx/dist_lto/tools/dist_lto_planner_darwin.py b/prelude/cxx/dist_lto/tools/dist_lto_planner_darwin.py
new file mode 100644
index 00000000000..81904bf60f7
--- /dev/null
+++ b/prelude/cxx/dist_lto/tools/dist_lto_planner_darwin.py
@@ -0,0 +1,317 @@
+#!/usr/bin/env python3
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+#
+# This source code is licensed under both the MIT license found in the
+# LICENSE-MIT file in the root directory of this source tree and the Apache
+# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
+# of this source tree.
+
+"""
+A simple wrapper around a distributed thinlto index command to fit into buck2's
+distributed thinlto build.
+
+This reads in a few things:
+    1. The "meta" file. This is a list of tuples describing each input object
+    (object file, index output, plan output, and archive information). All
+    items are line-separated, seven lines per entry; see the detailed comment
+    in main() below.
+    2. The index and link plan output paths
+    3. The commands for the actual index command.
+
+It will invoke the index command and then copy the index outputs to the
+requested locations and write a plan for each of those objects. This "plan" is
+a simple json file with the most important thing being a list of the indices
+of the imports needed for that file.
+
+It will then additionally write a link plan, which is just a translation of
+the thinlto index (which lists the objects actually needed for the final link).
+
+
+Both opt and link plans use indices to refer to other files because it allows the bzl
+code to easily map back to other objects held in buck memory.
+""" + +# pyre-unsafe + +import argparse +import json +import os +import os.path +import subprocess +import sys +from typing import List + + +def _get_argsfile(args) -> str: + # go through the flags passed to linker and find the index argsfile + argsfiles = list( + filter(lambda arg: arg.endswith("thinlto_index_argsfile"), args.index_args) + ) + assert ( + len(argsfiles) == 1 + ), f"expect only 1 argsfile but seeing multiple ones: {argsfiles}" + argsfile = argsfiles[0] + if argsfile.startswith("@"): + argsfile = argsfile[1:] + return argsfile + + +def _extract_lib_search_path(argsfile_path: str) -> List[str]: + lib_search_path = [] + with open(argsfile_path) as argsfile: + for line in argsfile: + if line.startswith("-L"): + lib_search_path.append(line.strip()) + return lib_search_path + + +def main(argv): + parser = argparse.ArgumentParser() + parser.add_argument("--meta") + parser.add_argument("--index") + parser.add_argument("--link-plan") + parser.add_argument("--final-link-index") + parser.add_argument("index_args", nargs=argparse.REMAINDER) + args = parser.parse_args(argv[1:]) + + subprocess.check_call(args.index_args[1:]) + + bitcode_suffix = ".thinlto.bc" + imports_suffix = ".imports" + opt_objects_suffix = ".opt.o" # please note the files are not exist yet, this is to generate the index file use in final link + + with open(args.meta) as meta: + meta_lines = [line.strip() for line in meta.readlines()] + + def read_imports(path, imports_path): + with open(imports_path) as infile: + return [line.strip() for line in infile.readlines()] + + def index_path(path): + return os.path.join(args.index, path) + + # The meta file comes directly from dist_lto.bzl and consists of a list of + # 7-tuples of information. It is easiest for us to write each tuple member + # as a separate line in Starlark, so these 7-tuples are encoded in groups + # of seven lines. + # + # The seven pieces of information are: + # 1. The path to the source bitcode file. This is used as an index into + # a dictionary (`mapping`) that records much of the metadata coming + # from these lines. + # 2. The path to an output bitcode file. This script is expected to place a + # ThinLTO index file at this location (suffixed `.thinlto.bc`). + # 3. The path to an output plan. This script is expected to place a link + # plan here (a JSON document indicating which other object files this) + # object file depends on, among other things. + # 4. The link data's index in the Starlark array. + # 5. If this object file came from an archive, the name of the archive. Otherwise, + # this line is empty. + # 6. If this object file came from an archive, the path to an output plan. + # This script is expected to produce an archive link plan here (a JSON) + # document similar to the object link plan, except containing link + # information for every file in the archive from which this object + # came. Otherwise, this line is empty. + # 7. If this object file came from an archive, the indexes directory of that + # archive. This script is expected to place all ThinLTO indexes derived + # from object files originating from this archive in that directory. + # Otherwise, this line is empty. + # + # There are two indices that are derived from this meta file: the object + # index (mapping["index"]) and the archive index (mapping["archive_index"]). + # These indices are indices into Starlark arrays for all objects and archive + # linkables, respectively. This script does not inspect them. 
+    mapping = {}
+    archives = {}
+    for i in range(0, len(meta_lines), 7):
+        path = meta_lines[i]
+        output = meta_lines[i + 1]
+        plan_output = meta_lines[i + 2]
+        idx = int(meta_lines[i + 3])
+        archive_name = meta_lines[i + 4]
+        archive_plan = meta_lines[i + 5]
+        archive_index_dir = meta_lines[i + 6]
+
+        archive_idx = idx if output == "" else None  # archives do not have outputs
+        mapping[path] = {
+            "output": output,
+            "plan_output": plan_output,
+            "index": idx,
+            "archive_index": archive_idx,
+            "archive_name": archive_name,
+        }
+        if archive_idx is not None:
+            archives[idx] = {
+                "name": archive_name,
+                "objects": [],
+                "plan": archive_plan,
+                "index_dir": archive_index_dir,
+            }
+
+    # We read the `index` and `index.full` files produced by the linker in the index stage
+    # and translate them into two outputs:
+    #   1. A link plan used to build the final_link args. (This one may be removable if we
+    #      refactor the workflow.)
+    #   2. A file list (*.final_link_index) used in the final link stage, which includes all
+    #      the files needed. It is based on index.full with some modifications, such as path
+    #      updates and removal of redundant (toolchain-added) dependencies.
+    index = {}
+    index_files_set = set()
+    loaded_input_bitcode_files = set()
+    with open(index_path("index")) as indexfile:
+        for line in indexfile:
+            line = line.strip()
+            index_files_set.add(line)
+            path = os.path.relpath(line, start=args.index)
+            loaded_input_bitcode_files.add(path)
+            index[mapping[path]["index"]] = 1
+
+    def _input_bitcode_file_path_is_loaded_by_linker(path):
+        return path in loaded_input_bitcode_files
+
+    non_lto_objects = {}
+    for path, data in sorted(mapping.items(), key=lambda v: v[0]):
+        output_loc = data["output"]
+        if os.path.exists(output_loc):
+            continue
+
+        if data["archive_index"] is not None:
+            archives[data["archive_index"]]["objects"].append(path)
+            continue
+
+        bc_file = index_path(path) + bitcode_suffix
+        imports_path = index_path(path) + imports_suffix
+        os.makedirs(os.path.dirname(output_loc), exist_ok=True)
+
+        if os.path.exists(imports_path):
+            assert os.path.exists(bc_file), "missing bc file for %s" % path
+            os.rename(bc_file, output_loc)
+            imports = read_imports(path, imports_path)
+            imports_list = []
+            archives_list = []
+            for path in imports:
+                entry = mapping[path]
+                if entry["archive_index"] is not None:
+                    archives_list.append(int(entry["archive_index"]))
+                else:
+                    imports_list.append(entry["index"])
+            plan = {
+                "imports": imports_list,
+                "archive_imports": archives_list,
+                "index": data["index"],
+                "bitcode_file": bc_file,
+                "path": path,
+                "is_bc": True,
+            }
+        else:
+            non_lto_objects[data["index"]] = 1
+            with open(output_loc, "w"):
+                pass
+            plan = {
+                "is_bc": False,
+            }
+
+        with open(data["plan_output"], "w") as planout:
+            json.dump(plan, planout, sort_keys=True)
+
+    for archive in archives.values():
+        # For archives, we must produce a plan that provides Starlark enough
+        # information about how to launch a dynamic opt for each object file
+        # in the archive.
+        archive_plan = {}
+
+        # This is convenient to store, since it's difficult for Starlark to
+        # calculate it.
+ archive_plan["base_dir"] = os.path.dirname(archive["plan"]) + object_plans = [] + for obj in archive["objects"]: + imports_path = index_path(obj) + imports_suffix + output_path = archive["index_dir"] + os.makedirs(output_path, exist_ok=True) + if os.path.exists(imports_path): + bc_file = index_path(obj) + bitcode_suffix + os.rename(bc_file, os.path.join(output_path, os.path.basename(bc_file))) + if not _input_bitcode_file_path_is_loaded_by_linker(obj): + object_plans.append( + { + "not_loaded_by_linker": True, + "is_bc": True, + } + ) + continue + + imports = read_imports(path, imports_path) + imports_list = [] + archives_list = [] + for path in imports: + entry = mapping[path] + if entry["archive_index"] is not None: + archives_list.append(int(entry["archive_index"])) + else: + imports_list.append(entry["index"]) + object_plans.append( + { + "is_bc": True, + "path": obj, + "imports": imports_list, + "archive_imports": archives_list, + "bitcode_file": os.path.join( + output_path, os.path.basename(bc_file) + ), + } + ) + else: + object_plans.append( + { + "is_bc": False, + "path": obj, + } + ) + + archive_plan["objects"] = object_plans + with open(archive["plan"], "w") as planout: + json.dump(archive_plan, planout, sort_keys=True) + + with open(args.link_plan, "w") as outfile: + json.dump( + { + "non_lto_objects": non_lto_objects, + "index": index, + }, + outfile, + indent=2, + sort_keys=True, + ) + + # Append all search path flags (e.g -Lfbcode/third-party-buck/platform010/build/glibc/lib) from argsfile to final_index + # this workaround is to make dist_lto compatible with link_group. see T136415235 for more info + argsfile = _get_argsfile(args) + lib_search_path = _extract_lib_search_path(argsfile) + + # build index file for final link use + with open(index_path("index.full")) as full_index_input, open( + args.final_link_index, "w" + ) as final_link_index_output: + final_link_index_output.write("\n".join(lib_search_path) + "\n") + for line in full_index_input: + line = line.strip() + path = os.path.relpath(line, start=args.index) + if line in index_files_set: + if mapping[path]["output"]: + # handle files that were not extracted from archives + output = mapping[path]["output"].replace( + bitcode_suffix, opt_objects_suffix + ) + final_link_index_output.write(output + "\n") + elif os.path.exists(index_path(path) + imports_suffix): + # handle files built from source that were extracted from archives + opt_objects_path = path.replace( + "/objects/", "/opt_objects/objects/" + ) + final_link_index_output.write(opt_objects_path + "\n") + else: + # handle pre-built archives + final_link_index_output.write(line + "\n") + else: + # handle input files that did not come from linker input, e.g. 
linker scripts
+                final_link_index_output.write(line + "\n")
+
+
+sys.exit(main(sys.argv))
diff --git a/prelude/cxx/dist_lto/tools/dist_lto_planner.py b/prelude/cxx/dist_lto/tools/dist_lto_planner_gnu.py
old mode 100755
new mode 100644
similarity index 99%
rename from prelude/cxx/dist_lto/tools/dist_lto_planner.py
rename to prelude/cxx/dist_lto/tools/dist_lto_planner_gnu.py
index 09a92105771..a83f90a809f
--- a/prelude/cxx/dist_lto/tools/dist_lto_planner.py
+++ b/prelude/cxx/dist_lto/tools/dist_lto_planner_gnu.py
@@ -43,7 +43,7 @@ def _get_argsfile(args) -> str:
     # go through the flags passed to linker and find the index argsfile
     argsfiles = list(
-        filter(lambda arg: arg.endswith("thinlto.index.argsfile"), args.index_args)
+        filter(lambda arg: arg.endswith("thinlto_index_argsfile"), args.index_args)
     )
     assert (
         len(argsfiles) == 1
diff --git a/prelude/cxx/dist_lto/tools/dist_lto_prepare.py b/prelude/cxx/dist_lto/tools/dist_lto_prepare.py
index 69f7fce54c5..3fe99839633 100644
--- a/prelude/cxx/dist_lto/tools/dist_lto_prepare.py
+++ b/prelude/cxx/dist_lto/tools/dist_lto_prepare.py
@@ -15,8 +15,10 @@
 import enum
 import json
 import os
+import shutil
 import subprocess
 import sys
+import tempfile

 from typing import List, Tuple

@@ -80,67 +82,58 @@ def main(argv: List[str]) -> int:
         # a long time, llvm-ar does not support --output and the change in llvm-ar
         # looks like it has stalled for years (https://reviews.llvm.org/D69418)
         # So, we need to invoke ar in the directory that we want it to extract into, and so
-        # need to adjust some paths.
-        ar_path = os.path.relpath(args.ar, start=objects_path)
-        archive_path = os.path.relpath(args.archive, start=objects_path)
+        # need absolute paths.
+        ar_path = os.path.abspath(args.ar)
+        archive_path = os.path.abspath(args.archive)
         output = subprocess.check_output(
             [ar_path, "t", archive_path], cwd=objects_path
         ).decode()
         member_list = [member for member in output.split("\n") if member]

-        # no duplicated filename
+        # This will extract all the members of the archive; when names collide,
+        # a later member replaces an earlier one. That is, if first/foo.txt and
+        # second/foo.txt are placed in an archive in that order, this will leave
+        # second/foo.txt in the objects_path.
         output = subprocess.check_output(
             [ar_path, "xv", archive_path], cwd=objects_path
         ).decode()
-        for line in output.splitlines():
-            assert line.startswith("x - ")
-            obj = line[4:]
-            known_objects.append(_gen_path(objects_path, obj))

         # Count all members of the same name.
         counter = {}
         for member in member_list:
             counter.setdefault(member, 0)
             counter[member] += 1
-
-        for member, count in counter.items():
-            if count <= 1:
-                continue
-            for current in range(1, count + 1):
-                if current == 1:
-                    # just extract the file
-                    output = subprocess.check_output(
-                        [ar_path, "xN", str(current), archive_path, member],
-                        cwd=objects_path,
-                    ).decode()
-                    assert not output
-                    # We've already added this above.
-                else:
-                    # llvm doesn't allow --output so we need this clumsiness
-                    tmp_filename = "tmp"
-                    current_file = _gen_filename(member, current)
-                    # rename current 'member' file to tmp
-                    output = subprocess.check_output(
-                        ["mv", member, tmp_filename], cwd=objects_path
-                    ).decode()
-                    assert not output
+            # Insert all objects at most once into the list of known objects
+            if counter[member] == 1:
+                known_objects.append(_gen_path(objects_path, member))
+
+        with tempfile.TemporaryDirectory() as temp_dir:
+            # For each duplicate member, rename and extract duplicates 1 through N
+            # inclusive.
While N was already extracted above, we don't want to rely + # upon this implementation detail of llvm-ar. + for member, count in counter.items(): + if count <= 1: + continue + for current in range(1, count + 1): # extract the file from archive output = subprocess.check_output( - [ar_path, "xN", str(current), archive_path, member], - cwd=objects_path, - ).decode() - assert not output - # rename the newly extracted file - output = subprocess.check_output( - ["mv", member, current_file], cwd=objects_path - ).decode() - assert not output - # rename the tmp file back to 'member' - output = subprocess.check_output( - ["mv", tmp_filename, member], cwd=objects_path + [ + ar_path, + "xN", + str(current), + archive_path, + member, + ], + cwd=temp_dir, ).decode() - assert not output - known_objects.append(_gen_path(objects_path, current_file)) + unique_name = _gen_filename(member, current) + # rename and move the newly extracted file to objects_path + shutil.move( + os.path.join(temp_dir, member), + os.path.join(os.path.abspath(objects_path), unique_name), + ) + if current > 1: + known_objects.append(_gen_path(objects_path, unique_name)) elif file_type == ArchiveKind.THIN_ARCHIVE: output = subprocess.check_output([args.ar, "t", args.archive]).decode() diff --git a/prelude/cxx/dist_lto/tools/tests/test_dist_lto_opt.py b/prelude/cxx/dist_lto/tools/tests/test_dist_lto_opt.py index 454690ab6ed..b83bd40bd90 100644 --- a/prelude/cxx/dist_lto/tools/tests/test_dist_lto_opt.py +++ b/prelude/cxx/dist_lto/tools/tests/test_dist_lto_opt.py @@ -8,7 +8,7 @@ import unittest -from cxx.dist_lto.tools.dist_lto_opt import _filter_flags +from cxx.dist_lto.tools.dist_lto_opt_gnu import _filter_flags class TestDistLtoOpt(unittest.TestCase): @@ -51,16 +51,13 @@ def test_filter_flags_hhvm_case_rev_0f8618f31(self): "--target=x86_64-redhat-linux-gnu", "-nostdinc", "-resource-dir", - "fbcode/third-party-buck/platform010/build/llvm-fb/12/lib/clang/stable", "-idirafter", - "fbcode/third-party-buck/platform010/build/llvm-fb/12/lib/clang/stable/include", "-idirafter", "fbcode/third-party-buck/platform010/build/glibc/include", "-idirafter", "fbcode/third-party-buck/platform010/build/kernel-headers/include", "-Bfbcode/third-party-buck/platform010/build/binutils/x86_64-facebook-linux/bin", "--cflag=--target=x86_64-redhat-linux-gnu", - "--ar=fbcode/third-party-buck/platform010/build/llvm-fb/12/bin/llvm-ar", "-Bfbcode/third-party-buck/platform010/build/glibc/lib", "-Bfbcode/third-party-buck/platform010/tools/gcc/lib/gcc/x86_64-redhat-linux-gnu/trunk", "-Lfbcode/third-party-buck/platform010/build/libgcc/lib/gcc/x86_64-facebook-linux/trunk", @@ -68,7 +65,6 @@ def test_filter_flags_hhvm_case_rev_0f8618f31(self): "-Wl,--dynamic-linker,/usr/local/fbcode/platform010/lib/ld.so", "-Wl,--disable-new-dtags", "-Bfbcode/third-party-buck/platform010/build/binutils/x86_64-facebook-linux/bin", - "-Bbuck-out/v2/gen/fbcode/8e3db19fe005003a/third-party-buck/platform010/build/llvm-fb/12/__lld_path__/lld_path/bin", "-Wl,--no-mmap-output-file", "-nodefaultlibs", "--target=x86_64-redhat-linux-gnu", @@ -156,10 +152,7 @@ def test_filter_flags_hhvm_case_rev_0f8618f31(self): def test_filter_flags_unicorn_case_rev_0f8618f31(self): inputs = [ - "--ld=fbcode/third-party-buck/platform010/build/llvm-fb/12/bin/clang++", - "--cc=buck-out/v2/gen/fbcode/8e3db19fe005003a/tools/build/buck/wrappers/__fbcc__/fbcc --cc=fbcode/third-party-buck/platform010/build/llvm-fb/12/bin/clang --target=x86_64-redhat-linux-gnu -nostdinc -resource-dir 
fbcode/third-party-buck/platform010/build/llvm-fb/12/lib/clang/stable -idirafter fbcode/third-party-buck/platform010/build/llvm-fb/12/lib/clang/stable/include -idirafter fbcode/third-party-buck/platform010/build/glibc/include -idirafter fbcode/third-party-buck/platform010/build/kernel-headers/include -Bfbcode/third-party-buck/platform010/build/binutils/x86_64-facebook-linux/bin",
             "--cflag=--target=x86_64-redhat-linux-gnu",
-            "--ar=fbcode/third-party-buck/platform010/build/llvm-fb/12/bin/llvm-ar",
             "-Bfbcode/third-party-buck/platform010/build/glibc/lib",
             "-Bfbcode/third-party-buck/platform010/tools/gcc/lib/gcc/x86_64-redhat-linux-gnu/trunk",
             "-Lfbcode/third-party-buck/platform010/build/libgcc/lib/gcc/x86_64-facebook-linux/trunk",
@@ -167,7 +160,6 @@ def test_filter_flags_unicorn_case_rev_0f8618f31(self):
             "-Wl,--dynamic-linker,/usr/local/fbcode/platform010/lib/ld.so",
             "-Wl,--disable-new-dtags",
             "-Bfbcode/third-party-buck/platform010/build/binutils/x86_64-facebook-linux/bin",
-            "-Bbuck-out/v2/gen/fbcode/8e3db19fe005003a/third-party-buck/platform010/build/llvm-fb/12/__lld_path__/lld_path/bin",
             "-Wl,--no-mmap-output-file",
             "-nodefaultlibs",
             "--target=x86_64-redhat-linux-gnu",
diff --git a/prelude/cxx/dwp.bzl b/prelude/cxx/dwp.bzl
index c3cafa7ba89..76e6e3c4e5d 100644
--- a/prelude/cxx/dwp.bzl
+++ b/prelude/cxx/dwp.bzl
@@ -24,17 +24,17 @@ def run_dwp_action(
         referenced_objects: [ArgLike, list[Artifact]],
         dwp_output: Artifact,
         local_only: bool):
-    args = cmd_args()
     dwp = toolchain.binary_utilities_info.dwp

-    # llvm trunk now supports 64-bit debug cu indedx, add --continue-on-cu-index-overflow by default
-    # to suppress dwp file overflow warning
-    args.add("/bin/sh", "-c", '"$1" --continue-on-cu-index-overflow -o "$2" -e "$3" && touch "$2"', "")
-    args.add(dwp, dwp_output.as_output(), obj)
-
-    # All object/dwo files referenced in the library/executable are implicitly
-    # processed by dwp.
-    args.hidden(referenced_objects)
+    args = cmd_args(
+        # llvm trunk now supports 64-bit debug cu index, add --continue-on-cu-index-overflow by default
+        # to suppress dwp file overflow warning
+        ["/bin/sh", "-c", '"$1" --continue-on-cu-index-overflow -o "$2" -e "$3" && touch "$2"', ""] +
+        [dwp, dwp_output.as_output(), obj],
+        # All object/dwo files referenced in the library/executable are implicitly
+        # processed by dwp.
+        hidden = referenced_objects,
+    )

     category = "dwp"
     if category_suffix != None:
diff --git a/prelude/builtin.bzl b/prelude/cxx/gcno.bzl
similarity index 65%
rename from prelude/builtin.bzl
rename to prelude/cxx/gcno.bzl
index ef1dd61fc71..cfb9d85b071 100644
--- a/prelude/builtin.bzl
+++ b/prelude/cxx/gcno.bzl
@@ -5,8 +5,7 @@
 # License, Version 2.0 found in the LICENSE-APACHE file in the root directory
 # of this source tree.

-# Definitions we have builtin to Buck.
-# Useful for running the Starlark checker on the files.
-
-def DefaultInfo():
-    pass
+# Provider that exposes the .gcno files produced during compilation
+GcnoFilesInfo = provider(fields = {
+    "gcno_files": provider_field(list[Artifact]),
+})
diff --git a/prelude/cxx/groups.bzl b/prelude/cxx/groups.bzl
index 72185a6942e..62a027050de 100644
--- a/prelude/cxx/groups.bzl
+++ b/prelude/cxx/groups.bzl
@@ -5,10 +5,8 @@
 # License, Version 2.0 found in the LICENSE-APACHE file in the root directory
 # of this source tree.
-load( - "@prelude//linking:link_info.bzl", - "Linkage", -) +load("@prelude//cxx:groups_types.bzl", "Traversal") +load("@prelude//linking:types.bzl", "Linkage") load( "@prelude//utils:build_target_pattern.bzl", "BuildTargetPattern", @@ -16,7 +14,7 @@ load( ) load( "@prelude//utils:graph_utils.bzl", - "breadth_first_traversal_by", + "depth_first_traversal_by", ) load( "@prelude//utils:strings.bzl", @@ -24,62 +22,18 @@ load( ) load( "@prelude//utils:utils.bzl", - "map_val", "value_or", ) - -# Types of group traversal -Traversal = enum( - # Includes the target and all of it's transitive dependencies in the group. - "tree", - # Includes only the target in the group. - "node", - # Uses pattern and separates all targets by full folder path. - "subfolders", -) - -# Optional type of filtering -FilterType = enum( - # Filters for targets with labels matching the regex pattern defined after `label:`. - "label", - # Filters for targets for the build target pattern defined after "pattern:". - "pattern", - # Filters for targets matching the regex pattern defined after "target_regex:". - "target_regex", -) - -BuildTargetFilter = record( - pattern = field(BuildTargetPattern), - _type = field(FilterType, FilterType("pattern")), -) - -LabelFilter = record( - regex = regex, - _type = field(FilterType, FilterType("label")), -) - -TargetRegexFilter = record( - regex = regex, - _type = field(FilterType, FilterType("target_regex")), -) - -# Label for special group mapping which makes every target associated with it to be included in all groups -MATCH_ALL_LABEL = "MATCH_ALL" - -# Label for special group mapping which makes every target associated with it to be linked directly -# against the final binary -NO_MATCH_LABEL = "NO_MATCH" - -# Representation of a parsed group mapping -GroupMapping = record( - # The root to apply this mapping to. - root = field([Label, None], None), - # The type of traversal to use. - traversal = field(Traversal, Traversal("tree")), - # Optional filter type to apply to the traversal. - filters = field(list[[BuildTargetFilter, LabelFilter, TargetRegexFilter]], []), - # Preferred linkage for this target when added to a link group. - preferred_linkage = field([Linkage, None], None), +load( + ":groups_types.bzl", + "BuildTargetFilter", + "FilterType", + "Group", + "GroupAttrs", + "GroupDefinition", + "GroupMapping", + "LabelFilter", + "TargetRegexFilter", ) _VALID_ATTRS = [ @@ -89,46 +43,29 @@ _VALID_ATTRS = [ "discard_group", "linker_flags", "requires_root_node_exists", + "prohibit_file_duplicates", + "prefer_optimized_experimental", ] -# Representation of group attributes -GroupAttrs = record( - # Use distributed thinlto to build the link group shared library. - enable_distributed_thinlto = field(bool, False), - # Enable this link group if the binary's node count exceeds the given threshold - enable_if_node_count_exceeds = field([int, None], None), - # Discard all dependencies in the link group, useful for dropping unused dependencies - # from the build graph. - discard_group = field(bool, False), - # Adds additional linker flags used to link the link group shared object. - linker_flags = field(list, []), - # Adds additional linker flags to apply to dependents that link against the - # link group's shared object. - exported_linker_flags = field(list, []), - # Requires root nodes in specs to always exist in dependency graph. - # Otherwise fails. 
-    requires_root_node_exists = field(bool, True),
-)
-
-# Types of group traversal
-GroupDefinition = enum(
-    # Group is explicitly defined in mapping provided by user.
-    # That is the default behavior.
-    "explicit",
-    # Group is implicitly created during mapping computations.
-    # For example, group can be created for "subfolders" traversal.
-    "implicit",
-)
-
-# Representation of a parsed group
-Group = record(
-    # The name for this group.
-    name = str,
-    # The mappings that are part of this group.
-    mappings = list[GroupMapping],
-    attrs = GroupAttrs,
-    definition_type = field(GroupDefinition, GroupDefinition("explicit")),
-)
+# Traversal types in this list will only assign the node
+# to a target (as opposed to the transitive deps of the node's tree).
+_TRAVERSALS_TO_ASSIGN_NODE = [
+    Traversal("node"),
+    Traversal("subfolders"),
+    # TODO (dust): Possible perf optimization:
+    # When intersecting configured targets, it's not possible to intersect
+    # a parent without also intersecting its children.
+    #
+    # As a result, there's a possible perf optimization to assign 'tree'
+    # to intersected targets instead, and leverage that to avoid traversing
+    # the entire tree of every root.
+    #
+    # For example:
+    # If iterating the tree of 'root2' we find a node which
+    # was also present in 'root1', we can skip traversing the subtree
+    # because it's inevitable that everything is going to match there too.
+    Traversal("intersect_any_roots"),
+]

 # Creates a group from an existing group, overwriting any properties provided
 def create_group(
@@ -144,6 +81,10 @@ def create_group(
         definition_type = value_or(definition_type, group.definition_type),
     )

+def get_roots_from_mapping(mapping):
+    deps = mapping[0] if type(mapping[0]) == "list" else [mapping[0]]
+    return filter(None, deps)
+
 def parse_groups_definitions(
         map: list,
         # Function to parse a root label from the input type, allowing different
@@ -165,17 +106,25 @@ def parse_groups_definitions(
             discard_group = attrs.get("discard_group", False),
             linker_flags = attrs.get("linker_flags", []),
             requires_root_node_exists = attrs.get("requires_root_node_exists", True),
+            prohibit_file_duplicates = attrs.get("prohibit_file_duplicates", False),
+            prefer_optimized_experimental = attrs.get("prefer_optimized_experimental", False),
         )

         parsed_mappings = []
         for entry in mappings:
             traversal = _parse_traversal_from_mapping(entry[1])
             mapping = GroupMapping(
-                root = map_val(parse_root, entry[0]),
+                roots = filter(None, [parse_root(root) for root in get_roots_from_mapping(entry)]),
                 traversal = traversal,
                 filters = _parse_filter_from_mapping(entry[2]),
                 preferred_linkage = Linkage(entry[3]) if len(entry) > 3 and entry[3] else None,
            )
+            num_roots = len(mapping.roots) if mapping.roots else 0
+            if num_roots > 1 and mapping.traversal != Traversal("intersect_any_roots"):
+                fail("Invariant. A link_group mapping with traversal type: {} can only have 1 root node. {} found.".format(mapping.traversal, mapping.roots))
+            elif mapping.traversal == Traversal("intersect_any_roots") and num_roots < 2:
+                fail("Invariant. A link_group mapping with traversal type 'intersect' must have at least 2 root nodes.
{} found.".format(mapping.roots)) + parsed_mappings.append(mapping) group = Group( @@ -195,6 +144,8 @@ def _parse_traversal_from_mapping(entry: str) -> Traversal: return Traversal("node") elif entry == "subfolders": return Traversal("subfolders") + elif entry == "intersect_any_roots": + return Traversal("intersect_any_roots") else: fail("Unrecognized group traversal type: " + entry) @@ -250,14 +201,43 @@ def compute_mappings(groups_map: dict[str, Group], graph_map: dict[Label, typing return target_to_group_map +def get_dedupped_roots_from_groups(groups: list[Group]) -> list[Label]: + roots = {} + for group in groups: + for mapping in group.mappings: + if not mapping.roots: + continue + + for root in mapping.roots: + roots[root] = True + + return list(roots.keys()) + def _find_targets_in_mapping( graph_map: dict[Label, typing.Any], mapping: GroupMapping) -> list[Label]: # If we have no filtering, we don't need to do any traversal to find targets to include. if not mapping.filters: - if mapping.root == None: - fail("no filter or explicit root given: {}", mapping) - return [mapping.root] + if not mapping.roots: + # Some link groups may want to partially define their mapping roots based on constraint + # that potentially can be resolved to `None`. + # + # E.g: + # ``` + # ("evict-mkl", [ + # (":mkl_ilp64_omp", "node", None, "shared"), + # (select( + # {"DEFAULT": None, "ovr_config//runtime:platform010": "//IntelComposerXE:mkl_ilp64_omp" }), + # "node", None, "shared" + # ), + # ]) + # ``` + # Second mapping will be resolved to `(None, "node", None, "shared")` and will not handle anything. + # There is no convenient way to gracefully handle that in user-facing link groups API. + return [] + + elif mapping.traversal != Traversal("intersect_any_roots"): + return mapping.roots # Else find all dependencies that match the filter. matching_targets = {} @@ -284,7 +264,7 @@ def _find_targets_in_mapping( return False return True - def find_matching_targets(node): # Label -> [Label]: + def populate_matching_targets(node): # Label -> bool: graph_node = graph_map[node] if matches_target(node, graph_node.labels): matching_targets[node] = None @@ -292,17 +272,77 @@ def _find_targets_in_mapping( # We can stop traversing the tree at this point because we've added the # build target to the list of all targets that will be traversed by the # algorithm that applies the groups. - return [] - return graph_node.deps + graph_node.exported_deps + return False + return True - if mapping.root == None: + def populate_matching_targets_bfs_wrapper(node): # (Label) -> list + if populate_matching_targets(node): + graph_node = graph_map[node] + return graph_node.deps + graph_node.exported_deps + return [] + + if not mapping.roots: for node in graph_map: - find_matching_targets(node) + populate_matching_targets(node) + elif mapping.traversal == Traversal("intersect_any_roots"): + targets_to_counter = {} + for root in mapping.roots: + # This is a captured variable inside `populate_matching_targets`. + # We reset it for each root we visit so that we don't have results + # from other roots. 
+ matching_targets = {} + depth_first_traversal_by(graph_map, [root], populate_matching_targets_bfs_wrapper) + for t in matching_targets: + targets_to_counter[t] = targets_to_counter.get(t, 0) + 1 + + return [ + t + for t, count in targets_to_counter.items() + if count > 1 + ] else: - breadth_first_traversal_by(graph_map, [mapping.root], find_matching_targets) + depth_first_traversal_by(graph_map, mapping.roots, populate_matching_targets_bfs_wrapper) return matching_targets.keys() +# Extracted from `_update_target_to_group_mapping` to avoid function allocations inside the loop +def _assign_target_to_group( + target_to_group_map, #: {"label": str} + node_traversed_targets, #: {"label": None} + group, # Group, + groups_map, # {str: Group} + mapping, # GroupMapping + target, # Label + node_traversal): # bool + # If the target hasn't already been assigned to a group, assign it to the + # first group claiming the target. Return whether the target was already assigned. + if target not in target_to_group_map: + if mapping.traversal == Traversal("subfolders"): + generated_group_name = _generate_group_subfolder_name(group.name, target.package) + _add_to_implicit_link_group(generated_group_name, group, groups_map, target_to_group_map, target) + else: + target_to_group_map[target] = group.name + + if node_traversal: + node_traversed_targets[target] = None + return False + else: + return True + +# Extracted from `_update_target_to_group_mapping` to avoid function allocations inside the loop +def _transitively_add_targets_to_group_mapping( + assign_target_to_group, # (Label, bool) -> bool + node_traversed_targets, #: {"label": None} + graph_map, # {"label": "_b"} + node): # ([Label]) -> None + previously_processed = assign_target_to_group(node, False) + + # If the node has been previously processed, and it was via tree (not node), all child nodes have been assigned + if previously_processed and node not in node_traversed_targets: + return None + graph_node = graph_map[node] + return graph_node.deps + graph_node.exported_deps + # Types removed to avoid unnecessary type checking which degrades performance. def _update_target_to_group_mapping( graph_map, # {"label": "_b"} @@ -312,37 +352,13 @@ def _update_target_to_group_mapping( groups_map, # {str: Group} mapping, # GroupMapping target): # Label - def assign_target_to_group( - target: Label, - node_traversal: bool) -> bool: - # If the target hasn't already been assigned to a group, assign it to the - # first group claiming the target. Return whether the target was already assigned. 
- if target not in target_to_group_map: - if mapping.traversal == Traversal("subfolders"): - generated_group_name = _generate_group_subfolder_name(group.name, target.package) - _add_to_implicit_link_group(generated_group_name, group, groups_map, target_to_group_map, target) - else: - target_to_group_map[target] = group.name - - if node_traversal: - node_traversed_targets[target] = None - return False - else: - return True - - def transitively_add_targets_to_group_mapping(node: Label) -> list[Label]: - previously_processed = assign_target_to_group(target = node, node_traversal = False) - - # If the node has been previously processed, and it was via tree (not node), all child nodes have been assigned - if previously_processed and node not in node_traversed_targets: - return [] - graph_node = graph_map[node] - return graph_node.deps + graph_node.exported_deps + assign_target_to_group = partial(_assign_target_to_group, target_to_group_map, node_traversed_targets, group, groups_map, mapping) # (Label, bool) -> bool + transitively_add_targets_to_group_mapping = partial(_transitively_add_targets_to_group_mapping, assign_target_to_group, node_traversed_targets, graph_map) # (Label) -> list[Label] - if mapping.traversal == Traversal("node") or mapping.traversal == Traversal("subfolders"): - assign_target_to_group(target = target, node_traversal = True) + if mapping.traversal in _TRAVERSALS_TO_ASSIGN_NODE: + assign_target_to_group(target, True) else: # tree - breadth_first_traversal_by(graph_map, [target], transitively_add_targets_to_group_mapping) + depth_first_traversal_by(graph_map, [target], transitively_add_targets_to_group_mapping) def _add_to_implicit_link_group( generated_group_name, # str @@ -406,7 +422,7 @@ def _make_json_info_for_group_mapping(group_mapping: GroupMapping) -> dict[str, return { "filters": _make_json_info_for_group_mapping_filters(group_mapping.filters), "preferred_linkage": group_mapping.preferred_linkage, - "root": group_mapping.root, + "roots": group_mapping.roots, "traversal": group_mapping.traversal, } @@ -422,5 +438,5 @@ def make_info_subtarget_providers(ctx: AnalysisContext, groups: list[Group], map "groups": {group.name: _make_json_info_for_group(group) for group in groups}, "mappings": mappings, } - json_output = ctx.actions.write_json("link_group_map_info.json", info_json) + json_output = ctx.actions.write_json("link_group_map_info.json", info_json, pretty = True) return [DefaultInfo(default_output = json_output)] diff --git a/prelude/cxx/groups_types.bzl b/prelude/cxx/groups_types.bzl new file mode 100644 index 00000000000..f5700966882 --- /dev/null +++ b/prelude/cxx/groups_types.bzl @@ -0,0 +1,113 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load("@prelude//linking:types.bzl", "Linkage") +load( + "@prelude//utils:build_target_pattern.bzl", + "BuildTargetPattern", +) + +# Label for special group mapping which makes every target associated with it to be included in all groups +MATCH_ALL_LABEL = "MATCH_ALL" + +# Label for special group mapping which makes every target associated with it to be linked directly +# against the final binary +NO_MATCH_LABEL = "NO_MATCH" + +Traversal = enum( + # Includes the target and all of it's transitive dependencies in the group. 
+ "tree", + # Includes only the target in the group. + "node", + # Uses pattern and separates all targets by full folder path. + "subfolders", + # Includes targets found in the transitive deps of *any* roots. + # Filters for these mappings will be applied to the intersected deps. + "intersect_any_roots", +) + +# Optional type of filtering +FilterType = enum( + # Filters for targets with labels matching the regex pattern defined after `label:`. + "label", + # Filters for targets for the build target pattern defined after "pattern:". + "pattern", + # Filters for targets matching the regex pattern defined after "target_regex:". + "target_regex", +) + +BuildTargetFilter = record( + pattern = field(BuildTargetPattern), + _type = field(FilterType, FilterType("pattern")), +) + +LabelFilter = record( + regex = regex, + _type = field(FilterType, FilterType("label")), +) + +TargetRegexFilter = record( + regex = regex, + _type = field(FilterType, FilterType("target_regex")), +) + +# Representation of a parsed group mapping +GroupMapping = record( + # The root to apply this mapping to. + roots = field(list[Label], []), + # The type of traversal to use. + traversal = field(Traversal, Traversal("tree")), + # Optional filter type to apply to the traversal. + filters = field(list[[BuildTargetFilter, LabelFilter, TargetRegexFilter]], []), + # Preferred linkage for this target when added to a link group. + preferred_linkage = field([Linkage, None], None), +) + +# Representation of group attributes +GroupAttrs = record( + # Use distributed thinlto to build the link group shared library. + enable_distributed_thinlto = field(bool, False), + # Enable this link group if the binary's node count exceeds the given threshold + enable_if_node_count_exceeds = field([int, None], None), + # Discard all dependencies in the link group, useful for dropping unused dependencies + # from the build graph. + discard_group = field(bool, False), + # Adds additional linker flags used to link the link group shared object. + linker_flags = field(list, []), + # Adds additional linker flags to apply to dependents that link against the + # link group's shared object. + exported_linker_flags = field(list, []), + # Requires root nodes in specs to always exist in dependency graph. + # Otherwise fails. + requires_root_node_exists = field(bool, True), + # For certain wide-scale generic link groups we want to enable + # initial duplicate analysis. This is useful for detecting dduplicated symbols problem early + # for automatoc link groups that we not aware about (e.g. evicting whole root package folder into link group) + prohibit_file_duplicates = field(bool, False), + # Uses optimized compilation outputs if available. + prefer_optimized_experimental = field(bool, False), +) + +# Types of group traversal +GroupDefinition = enum( + # Group is explicitly defined in mapping provided by user. + # That is the default behavior. + "explicit", + # Group is implicitly created during mapping computations. + # For example, group can be created for "subfolders" traversal. + "implicit", +) + +# Representation of a parsed group +Group = record( + # The name for this group. + name = str, + # The mappings that are part of this group. 
+ mappings = list[GroupMapping], + attrs = GroupAttrs, + definition_type = field(GroupDefinition, GroupDefinition("explicit")), +) diff --git a/prelude/cxx/headers.bzl b/prelude/cxx/headers.bzl index 21faa874195..c78c16ecb27 100644 --- a/prelude/cxx/headers.bzl +++ b/prelude/cxx/headers.bzl @@ -6,6 +6,7 @@ # of this source tree. load("@prelude//:paths.bzl", "paths") +load("@prelude//cxx:cxx_toolchain_types.bzl", "LinkerType") load("@prelude//cxx:cxx_utility.bzl", "cxx_attrs_get_allow_cache_upload") load("@prelude//utils:expect.bzl", "expect") load("@prelude//utils:lazy.bzl", "lazy") @@ -67,7 +68,7 @@ HeaderStyle = enum( Headers = record( include_path = field(cmd_args), # NOTE(agallagher): Used for module hack replacement. - symlink_tree = field([Artifact, None], None), + symlink_tree = field(Artifact | None, None), # args that map symlinked private headers to source path file_prefix_args = field([cmd_args, None], None), ) @@ -111,15 +112,16 @@ CPrecompiledHeaderInfo = provider(fields = { def cxx_attr_header_namespace(ctx: AnalysisContext) -> str: return value_or(ctx.attrs.header_namespace, ctx.label.package) -def cxx_attr_exported_headers(ctx: AnalysisContext, headers_layout: CxxHeadersLayout) -> list[CHeader]: - headers = _get_attr_headers(ctx.attrs.exported_headers, headers_layout.namespace, headers_layout.naming) - platform_headers = _get_attr_headers(_headers_by_platform(ctx, ctx.attrs.exported_platform_headers), headers_layout.namespace, headers_layout.naming) +def cxx_attr_headers_list(ctx: AnalysisContext, headers: typing.Any, platform_headers: typing.Any, headers_layout: CxxHeadersLayout) -> list[CHeader]: + headers = _get_attr_headers(headers, headers_layout.namespace, headers_layout.naming) + platform_headers = _get_attr_headers(_headers_by_platform(ctx, platform_headers), headers_layout.namespace, headers_layout.naming) return headers + platform_headers +def cxx_attr_exported_headers(ctx: AnalysisContext, headers_layout: CxxHeadersLayout) -> list[CHeader]: + return cxx_attr_headers_list(ctx, ctx.attrs.exported_headers, ctx.attrs.exported_platform_headers, headers_layout) + def cxx_attr_headers(ctx: AnalysisContext, headers_layout: CxxHeadersLayout) -> list[CHeader]: - headers = _get_attr_headers(ctx.attrs.headers, headers_layout.namespace, headers_layout.naming) - platform_headers = _get_attr_headers(_headers_by_platform(ctx, ctx.attrs.platform_headers), headers_layout.namespace, headers_layout.naming) - return headers + platform_headers + return cxx_attr_headers_list(ctx, ctx.attrs.headers, ctx.attrs.platform_headers, headers_layout) def cxx_get_regular_cxx_headers_layout(ctx: AnalysisContext) -> CxxHeadersLayout: namespace = cxx_attr_header_namespace(ctx) @@ -182,7 +184,7 @@ def _header_mode(ctx: AnalysisContext) -> HeaderMode: return toolchain_header_mode -def prepare_headers(ctx: AnalysisContext, srcs: dict[str, Artifact], name: str, project_root_file: [Artifact, None]) -> [Headers, None]: +def prepare_headers(ctx: AnalysisContext, srcs: dict[str, Artifact], name: str) -> [Headers, None]: """ Prepare all the headers we want to use, depending on the header_mode set on the target's toolchain. 
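With `project_root_file` dropped from `prepare_headers`, a caller now supplies only the source map and a name seed. A minimal sketch of a call under the new signature (the header path and name are illustrative, not from this diff):

    def _prepare_example_headers(ctx: AnalysisContext, hdr: Artifact) -> [Headers, None]:
        # The dict maps the include path seen by consumers to the backing artifact;
        # whether an hmap, a symlink tree, or both is produced depends on the
        # toolchain's header_mode.
        return prepare_headers(ctx, {"example/example.h": hdr}, "example-headers")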
@@ -204,23 +206,23 @@ def prepare_headers(ctx: AnalysisContext, srcs: dict[str, Artifact], name: str, lazy.is_any(lambda n: paths.basename(n) == "module.modulemap", srcs.keys())): header_mode = HeaderMode("symlink_tree_only") - output_name = name + "-abs" if project_root_file else name + output_name = name if header_mode == HeaderMode("header_map_only"): headers = {h: (a, "{}") for h, a in srcs.items()} - hmap = _mk_hmap(ctx, output_name, headers, project_root_file) + hmap = _mk_hmap(ctx, output_name, headers) return Headers( - include_path = cmd_args(hmap).hidden(srcs.values()), + include_path = cmd_args(hmap, hidden = srcs.values()), ) symlink_dir = ctx.actions.symlinked_dir(output_name, _normalize_header_srcs(srcs)) if header_mode == HeaderMode("symlink_tree_only"): return Headers(include_path = cmd_args(symlink_dir), symlink_tree = symlink_dir) if header_mode == HeaderMode("symlink_tree_with_header_map"): headers = {h: (symlink_dir, "{}/" + h) for h in srcs} - hmap = _mk_hmap(ctx, output_name, headers, project_root_file) + hmap = _mk_hmap(ctx, output_name, headers) file_prefix_args = _get_debug_prefix_args(ctx, symlink_dir) return Headers( - include_path = cmd_args(hmap).hidden(symlink_dir), + include_path = cmd_args(hmap, hidden = symlink_dir), symlink_tree = symlink_dir, file_prefix_args = file_prefix_args, ) @@ -333,31 +335,31 @@ def _get_dict_header_namespace(namespace: str, naming: CxxHeadersNaming) -> str: def _get_debug_prefix_args(ctx: AnalysisContext, header_dir: Artifact) -> [cmd_args, None]: # NOTE(@christylee): Do we need to enable debug-prefix-map for darwin and windows? - if get_cxx_toolchain_info(ctx).linker_info.type != "gnu": + if get_cxx_toolchain_info(ctx).linker_info.type != LinkerType("gnu"): return None - debug_prefix_args = cmd_args() fmt = "-fdebug-prefix-map={}=" + value_or(header_dir.owner.cell, ".") - debug_prefix_args.add( + return cmd_args( cmd_args(header_dir, format = fmt), ) - return debug_prefix_args -def _mk_hmap(ctx: AnalysisContext, name: str, headers: dict[str, (Artifact, str)], project_root_file: [Artifact, None]) -> Artifact: +def _mk_hmap(ctx: AnalysisContext, name: str, headers: dict[str, (Artifact, str)]) -> Artifact: output = ctx.actions.declare_output(name + ".hmap") - cmd = cmd_args(get_cxx_toolchain_info(ctx).mk_hmap) - cmd.add(["--output", output.as_output()]) header_args = cmd_args() for n, (path, fmt) in headers.items(): header_args.add(n) # We don't care about the header contents -- just their names. 
-        header_args.add(cmd_args(path, format = fmt).ignore_artifacts())
+        header_args.add(cmd_args(path, format = fmt, ignore_artifacts = True))
+
+    hmap_args_file = ctx.actions.write(output.basename + ".cxx_hmap_argsfile", cmd_args(header_args, quote = "shell"))
 
-    hmap_args_file = ctx.actions.write(output.basename + ".argsfile", cmd_args(header_args, quote = "shell"))
-    cmd.add(["--mappings-file", hmap_args_file]).hidden(header_args)
-    if project_root_file:
-        cmd.add(["--project-root-file", project_root_file])
+    cmd = cmd_args(
+        [get_cxx_toolchain_info(ctx).internal_tools.hmap_wrapper] +
+        ["--output", output.as_output()] +
+        ["--mappings-file", hmap_args_file],
+        hidden = header_args,
+    )
 
     ctx.actions.run(cmd, category = "generate_hmap", identifier = name, allow_cache_upload = cxx_attrs_get_allow_cache_upload(ctx.attrs))
     return output
diff --git a/prelude/cxx/index_store.bzl b/prelude/cxx/index_store.bzl
new file mode 100644
index 00000000000..ca00472ce63
--- /dev/null
+++ b/prelude/cxx/index_store.bzl
@@ -0,0 +1,139 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+#
+# This source code is licensed under both the MIT license found in the
+# LICENSE-MIT file in the root directory of this source tree and the Apache
+# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
+# of this source tree.
+
+load("@prelude//:paths.bzl", "paths")
+load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo")
+
+INDEX_STORE_SUBTARGET = "index-store"
+FULL_INDEX_STORE_SUBTARGET = "full-index-store"
+
+# Magic number; it is a trade-off:
+# more buckets means more materialization time but less time waiting for RE to merge each bucket;
+# fewer buckets means less materialization time but more time waiting for RE to merge each bucket.
+_BUCK_COUNT = 20
+
+def _index_store_args(artifact: Artifact) -> Artifact:
+    return artifact
+
+IndexStoreTSet = transitive_set(
+    args_projections = {
+        "args": _index_store_args,
+    },
+)
+
+IndexStoreInfo = provider(
+    fields = {
+        # The name of the target.
+        "name": provider_field(str),
+        # A tset with this target's index store and all of its dependencies' index stores in the children.
+        "tset": provider_field(IndexStoreTSet),
+    },
+)
+
+def _get_merge_index_store_tool(ctx: AnalysisContext) -> RunInfo | None:
+    apple_toolchain = getattr(ctx.attrs, "_apple_toolchain", None)
+    if apple_toolchain == None:
+        return None
+    return apple_toolchain[AppleToolchainInfo].merge_index_store
+
+def _merge_index_store(ctx: AnalysisContext, index_stores: list[Artifact] | TransitiveSet, merge_output_dir_name: str | None = None) -> Artifact | None:
+    if isinstance(index_stores, list):
+        if len(index_stores) == 0:
+            return None
+
+        if len(index_stores) == 1:
+            return index_stores[0]
+
+    merge_index_store_tool = _get_merge_index_store_tool(ctx)
+    if merge_index_store_tool == None:
+        return None
+    if merge_output_dir_name == None:
+        merge_output_dir_name = paths.join("__indexstore__", ctx.attrs.name, "index_store")
+    merged_index_store = ctx.actions.declare_output(merge_output_dir_name)
+    cmd = cmd_args([merge_index_store_tool])
+    cmd.add(["--dest", merged_index_store.as_output()])
+    cmd.add(["--sources"])
+    if isinstance(index_stores, list):
+        cmd.add(index_stores)
+    else:
+        cmd.add(index_stores.project_as_args("args"))
+
+    # Use prefer_remote = True here; it has the following two advantages:
+    # 1. Each bucket performs its merge on RE, which fully utilizes the high-speed
+    #    network for materialization and uses RE resources to merge in parallel.
+    # 2. After merging each bucket, the index store is smaller, so there is less
+    #    to materialize locally, which speeds up the local merge and thus the
+    #    overall process.
+    ctx.actions.run(cmd, category = "merge_index_store", identifier = merge_output_dir_name, allow_cache_upload = True, prefer_remote = True)
+    return merged_index_store
+
+def _hash_bucket_index_stores(index_stores: list[Artifact]):
+    buckets_to_artifacts = {}
+    for index_store in index_stores:
+        hash_value = hash(index_store.short_path)
+        bucket = hash_value % _BUCK_COUNT
+        if bucket not in buckets_to_artifacts:
+            buckets_to_artifacts[bucket] = []
+        buckets_to_artifacts[bucket].append(index_store)
+    return buckets_to_artifacts
+
+def _merge_all_index_store(ctx: AnalysisContext, index_stores: TransitiveSet) -> Artifact | None:
+    index_store_output_path = read_config("apple", "index_store_output", None)
+    if index_store_output_path:
+        index_stores = list(index_stores.traverse())
+        merge_index_store_tool = _get_merge_index_store_tool(ctx)
+        if merge_index_store_tool == None:
+            return None
+        outputs = []
+
+        buckets_to_artifacts = _hash_bucket_index_stores(index_stores)
+
+        for bucket in buckets_to_artifacts:
+            index_stores = buckets_to_artifacts[bucket]
+            merged_bucket_index_store = _merge_index_store(ctx, index_stores, merge_output_dir_name = "merge_bucket/{}/index_store".format(bucket))
+
+            name = "local_merge/{}/index_store".format(bucket)
+            local_merged_index_store = ctx.actions.declare_output(name, dir = False)  # A dummy output that will be empty; it exists to make buck2 run the action.
+            cmd = cmd_args([merge_index_store_tool, "--dest", index_store_output_path, "--sources", merged_bucket_index_store])
+            cmd.add(["--dummy-output", local_merged_index_store.as_output()])
+
+            # Each bucket runs a local action that merges into the same local index store;
+            # this way, a given index store does not have to wait for all index stores to
+            # be materialized.
+            ctx.actions.run(cmd, category = "index_store_local_merge", identifier = name, local_only = True)
+            outputs.append(local_merged_index_store)
+
+        final_output = ctx.actions.declare_output("dummy_final_local_merge", dir = False)  # A dummy output that will be empty; it exists to make buck2 run the action.
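The bucketing above leans on Starlark's deterministic string `hash`, so a given index store lands in the same bucket on every build and the per-bucket merge actions stay stable and cacheable. A condensed sketch of the distribution step (helper name hypothetical; `_BUCK_COUNT` as defined above):

    def _bucket_for(index_store: Artifact) -> int:
        # hash() of a string is deterministic in Starlark, so this is stable across builds.
        return hash(index_store.short_path) % _BUCK_COUNT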
+ cmd = cmd_args(["touch", final_output.as_output()], hidden = outputs) + + ctx.actions.run(cmd, category = "index_store_local_merge", identifier = "final_local_merge (dummy)", local_only = True) + return final_output + else: + return _merge_index_store(ctx, index_stores, paths.join("__indexstore__", ctx.attrs.name, "full_index_stores")) + +def _gather_deps_index_store_tsets(deps: list[Dependency]) -> list[IndexStoreTSet]: + deps_indexstore_infos = filter(None, [dep.get(IndexStoreInfo) for dep in deps]) + return [info.tset for info in deps_indexstore_infos] + +def create_index_store_subtargets_and_provider(ctx: AnalysisContext, current_target_index_stores: list[Artifact], deps: list[Dependency]) -> (dict[str, list[Provider]], IndexStoreInfo): + # Create a subtarget for the current target's index store + sub_targets = {} + merged_index_store = _merge_index_store(ctx, current_target_index_stores) + sub_targets[INDEX_STORE_SUBTARGET] = [DefaultInfo(default_output = merged_index_store)] + + # Crate a subtarget for the merged all deps' and itself's index store + deps_indexstore_tsets = _gather_deps_index_store_tsets(deps) + if merged_index_store: + index_store_tset = ctx.actions.tset(IndexStoreTSet, value = merged_index_store, children = deps_indexstore_tsets) + else: + index_store_tset = ctx.actions.tset(IndexStoreTSet, children = deps_indexstore_tsets) + index_store_info = IndexStoreInfo(name = ctx.attrs.name, tset = index_store_tset) + + output = _merge_all_index_store(ctx, index_store_tset) + sub_targets[FULL_INDEX_STORE_SUBTARGET] = [DefaultInfo(default_output = output)] + + return (sub_targets, index_store_info) diff --git a/prelude/cxx/link.bzl b/prelude/cxx/link.bzl index 62466288658..2004977cc14 100644 --- a/prelude/cxx/link.bzl +++ b/prelude/cxx/link.bzl @@ -16,10 +16,18 @@ load( "bolt", "cxx_use_bolt", ) -load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo") +load( + "@prelude//cxx:cxx_toolchain_types.bzl", + "CxxToolchainInfo", + "LinkerType", +) +load( + "@prelude//cxx/dist_lto:darwin_dist_lto.bzl", + "cxx_darwin_dist_link", +) load( "@prelude//cxx/dist_lto:dist_lto.bzl", - "cxx_dist_link", + "cxx_gnu_dist_link", ) load("@prelude//linking:execution_preference.bzl", "LinkExecutionPreference", "LinkExecutionPreferenceInfo", "get_action_execution_attributes") load( @@ -34,8 +42,10 @@ load( ) load( "@prelude//linking:lto.bzl", + "LtoMode", "get_split_debug_lto_info", ) +load("@prelude//linking:stamp_build_info.bzl", "stamp_build_info") load("@prelude//linking:strip.bzl", "strip_object") load("@prelude//utils:expect.bzl", "expect") load("@prelude//utils:utils.bzl", "map_val", "value_or") @@ -60,6 +70,7 @@ load(":link_types.bzl", "CxxLinkResultType", "LinkOptions", "merge_link_options" load( ":linker.bzl", "SharedLibraryFlagOverrides", # @unused Used as a type + "get_deffile_flags", "get_import_library", "get_output_flags", "get_shared_library_flags", @@ -83,8 +94,8 @@ CxxLinkResult = record( def link_external_debug_info( ctx: AnalysisContext, links: list[LinkArgs], - split_debug_output: [Artifact, None] = None, - pdb: [Artifact, None] = None) -> ArtifactTSet: + split_debug_output: Artifact | None = None, + pdb: Artifact | None = None) -> ArtifactTSet: external_debug_artifacts = [] # When using LTO+split-dwarf, the link step will generate externally @@ -133,21 +144,32 @@ def cxx_link_into( linker_map_data = None if linker_info.supports_distributed_thinlto and opts.enable_distributed_thinlto: - if not linker_info.requires_objects: - fail("Cannot use distributed thinlto if the 
cxx toolchain doesn't require_objects")
+        if not linker_info.lto_mode == LtoMode("thin"):
+            fail("Cannot use distributed thinlto if the cxx toolchain doesn't use thin-lto lto_mode")
         sanitizer_runtime_args = cxx_sanitizer_runtime_arguments(ctx, cxx_toolchain_info, output)
         if sanitizer_runtime_args.extra_link_args or sanitizer_runtime_args.sanitizer_runtime_files:
             fail("Cannot use distributed thinlto with sanitizer runtime")
-        exe = cxx_dist_link(
-            ctx,
-            opts.links,
-            output,
-            linker_map,
-            opts.category_suffix,
-            opts.identifier,
-            should_generate_dwp,
-            is_result_executable,
-        )
+
+        linker_type = linker_info.type
+        if linker_type == LinkerType("darwin"):
+            exe = cxx_darwin_dist_link(
+                ctx,
+                output,
+                opts,
+                linker_map,
+            )
+        elif linker_type == LinkerType("gnu"):
+            exe = cxx_gnu_dist_link(
+                ctx,
+                output,
+                opts,
+                linker_map,
+                should_generate_dwp,
+                is_result_executable,
+            )
+        else:
+            fail("Linker type {} not supported for distributed thin-lto".format(linker_type))
+
         return CxxLinkResult(
             linked_object = exe,
             linker_map_data = linker_map_data,
@@ -166,9 +188,6 @@ def cxx_link_into(
     all_link_args = cmd_args(link_cmd_parts.linker_flags)
     all_link_args.add(get_output_flags(linker_info.type, output))
 
-    sanitizer_runtime_args = cxx_sanitizer_runtime_arguments(ctx, cxx_toolchain_info, output)
-    all_link_args.add(sanitizer_runtime_args.extra_link_args)
-
     # Darwin LTO requires extra link outputs to preserve debug info
     split_debug_output = None
     split_debug_lto_info = get_split_debug_lto_info(ctx.actions, cxx_toolchain_info, output.short_path)
@@ -186,12 +205,12 @@ def cxx_link_into(
     else:
         link_args_suffix = opts.category_suffix
     link_args_output = make_link_args(
+        ctx,
         ctx.actions,
         cxx_toolchain_info,
         links_with_linker_map,
         suffix = link_args_suffix,
         output_short_path = output.short_path,
-        is_shared = result_type.value == "shared_library",
         link_ordering = value_or(
             opts.link_ordering,
             # Fallback to toolchain default.
@@ -200,6 +219,12 @@ def cxx_link_into(
     )
     all_link_args.add(link_args_output.link_args)
 
+    # Sanitizer runtime args must appear at the end because they can affect the
+    # behavior of Swift runtime loading when the app also has an embedded
+    # Swift runtime.
+    sanitizer_runtime_args = cxx_sanitizer_runtime_arguments(ctx, cxx_toolchain_info, output)
+    all_link_args.add(sanitizer_runtime_args.extra_link_args)
+
     bitcode_linkables = []
     for link_item in opts.links:
         if link_item.infos == None:
@@ -224,21 +249,25 @@ def cxx_link_into(
 
     all_link_args.add(link_cmd_parts.post_linker_flags)
 
-    if linker_info.type == "windows":
+    if linker_info.type == LinkerType("windows"):
         shell_quoted_args = cmd_args(all_link_args)
     else:
         shell_quoted_args = cmd_args(all_link_args, quote = "shell")
 
     argfile, _ = ctx.actions.write(
-        output.short_path + ".linker.argsfile",
+        output.short_path + ".cxx_link_argsfile",
         shell_quoted_args,
        allow_args = True,
     )
 
-    command = cmd_args(link_cmd_parts.linker)
-    command.add(cmd_args(argfile, format = "@{}"))
-    command.hidden(link_args_output.hidden)
-    command.hidden(shell_quoted_args)
+    command = cmd_args(
+        link_cmd_parts.linker,
+        cmd_args(argfile, format = "@{}"),
+        hidden = [
+            link_args_output.hidden,
+            shell_quoted_args,
+        ],
+    )
     category = "cxx_link"
     if opts.category_suffix != None:
         category += "_" + opts.category_suffix
 
@@ -247,11 +276,13 @@ def cxx_link_into(
     # generate a DWO directory, so make sure we at least `mkdir` an empty
     # one to make v2/RE happy.
if split_debug_output != None: - cmd = cmd_args(["/bin/sh", "-c"]) - cmd.add(cmd_args(split_debug_output.as_output(), format = 'mkdir -p {}; "$@"')) - cmd.add('""').add(command) - cmd.hidden(command) - command = cmd + command = cmd_args( + "/bin/sh", + "-c", + cmd_args(split_debug_output.as_output(), format = 'mkdir -p {}; "$@"'), + '""', + command, + ) link_execution_preference_info = LinkExecutionPreferenceInfo( preference = opts.link_execution_preference, @@ -260,6 +291,11 @@ def cxx_link_into( opts.link_execution_preference, ) + # only specify error_handler if one exists + error_handler_args = {} + if opts.error_handler: + error_handler_args["error_handler"] = opts.error_handler + ctx.actions.run( command, prefer_local = action_execution_properties.prefer_local, @@ -270,13 +306,16 @@ def cxx_link_into( identifier = opts.identifier, force_full_hybrid_if_capable = action_execution_properties.full_hybrid, allow_cache_upload = opts.allow_cache_upload, + **error_handler_args ) unstripped_output = output if opts.strip: strip_args = opts.strip_args_factory(ctx) if opts.strip_args_factory else cmd_args() output = strip_object(ctx, cxx_toolchain_info, output, strip_args, opts.category_suffix) - final_output = output if not (is_result_executable and cxx_use_bolt(ctx)) else bolt(ctx, output, opts.identifier) + final_output = output if not (is_result_executable and cxx_use_bolt(ctx)) else bolt(ctx, output, external_debug_info, opts.identifier) + final_output = stamp_build_info(ctx, final_output) if is_result_executable else final_output + dwp_artifact = None if should_generate_dwp: # TODO(T110378144): Once we track split dwarf from compiles, we should @@ -480,7 +519,9 @@ def cxx_link_shared_library( output, ) - links_with_extra_args = [LinkArgs(flags = extra_args)] + opts.links + [LinkArgs(flags = import_library_args)] + deffile_args = get_deffile_flags(ctx, linker_type) + + links_with_extra_args = [LinkArgs(flags = extra_args)] + opts.links + [LinkArgs(flags = import_library_args + deffile_args)] opts = merge_link_options( opts, diff --git a/prelude/cxx/link_groups.bzl b/prelude/cxx/link_groups.bzl index d86bcdf6268..cdde311ee80 100644 --- a/prelude/cxx/link_groups.bzl +++ b/prelude/cxx/link_groups.bzl @@ -6,6 +6,16 @@ # of this source tree. 
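One detail of the `ctx.actions.run` change in the link.bzl hunk above is worth spelling out: collecting the optional argument into a kwargs dict means `error_handler` is forwarded only when one exists, instead of explicitly passing `None`. A minimal sketch of the same pattern under that assumption:

    def _run_with_optional_handler(ctx: AnalysisContext, cmd: cmd_args, error_handler = None):
        kwargs = {}
        if error_handler:
            kwargs["error_handler"] = error_handler
        ctx.actions.run(cmd, category = "example", **kwargs)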
load("@prelude//:paths.bzl", "paths") +load( + "@prelude//cxx:groups_types.bzl", + "Group", # @unused Used as a type +) +load( + "@prelude//cxx:link_groups_types.bzl", + "LinkGroupInfo", + "LinkGroupsDebugLinkableEntry", + "LinkGroupsDebugLinkableItem", +) load("@prelude//linking:execution_preference.bzl", "LinkExecutionPreference") load( "@prelude//linking:link_groups.bzl", @@ -18,7 +28,6 @@ load( "LinkInfo", "LinkInfos", "LinkStrategy", - "Linkage", "LinkedObject", # @unused Used as a type "SharedLibLinkable", "get_lib_output_style", @@ -38,11 +47,19 @@ load( "get_linkable_graph_node_map_func", "get_transitive_deps", ) +load( + "@prelude//linking:shared_libraries.bzl", + "SharedLibraries", + "SharedLibrary", + "Soname", + "create_shlib", +) +load("@prelude//linking:types.bzl", "Linkage") load("@prelude//utils:arglike.bzl", "ArgLike") load("@prelude//utils:expect.bzl", "expect") load( "@prelude//utils:graph_utils.bzl", - "breadth_first_traversal_by", + "depth_first_traversal_by", ) load( "@prelude//utils:set.bzl", @@ -61,12 +78,14 @@ load( load(":cxx_toolchain_types.bzl", "PicBehavior") load( ":groups.bzl", - "Group", # @unused Used as a type - "MATCH_ALL_LABEL", - "NO_MATCH_LABEL", "compute_mappings", "parse_groups_definitions", ) +load( + ":groups_types.bzl", + "MATCH_ALL_LABEL", + "NO_MATCH_LABEL", +) load( ":link.bzl", "cxx_link_shared_library", @@ -107,25 +126,13 @@ LINK_GROUP_MAP_DATABASE_FILENAME = "link_group_map_database.json" LINK_GROUP_MAPPINGS_SUB_TARGET = "link-group-mappings" LINK_GROUP_MAPPINGS_FILENAME_SUFFIX = ".link_group_map.json" -LinkGroupInfo = provider( - # @unsorted-dict-items - fields = { - "groups": provider_field(dict[str, Group]), - "groups_hash": provider_field(int), - "mappings": provider_field(dict[Label, str]), - # Additional graphs needed to cover labels referenced by the groups above. - # This is useful in cases where the consumer of this provider won't already - # have deps covering these. - # NOTE(agallagher): We do this to maintain existing behavior w/ the - # standalone `link_group_map()` rule, but it's not clear if it's actually - # desirable behavior. - "graph": provider_field(LinkableGraph), - }, -) - LinkGroupLinkInfo = record( link_info = field(LinkInfo), output_style = field(LibOutputStyle), + + # Where this link info is originated from. + # Either target label or link group name + link_name = field(Label | str), ) LinkGroupLibSpec = record( @@ -142,6 +149,7 @@ LinkGroupLibSpec = record( root = field([LinkableRootInfo, None], None), # The link group to link. group = field(Group), + label = field(Label | None, None), ) _LinkedLinkGroup = record( @@ -152,6 +160,9 @@ _LinkedLinkGroup = record( _LinkedLinkGroups = record( libs = field(dict[str, _LinkedLinkGroup]), symbol_ldflags = field(list[typing.Any], []), + libs_debug_info = field(dict[typing.Any, typing.Any]), + # Mapping from a target to a link group name it was linked into. 
+    targets_consumed_by_link_groups = field(dict[Label, str]),
 )
 
 def get_link_group(ctx: AnalysisContext) -> [str, None]:
@@ -220,12 +231,22 @@ def get_link_group_info(
     )
 
 def get_link_group_preferred_linkage(link_groups: list[Group]) -> dict[Label, Linkage]:
-    return {
-        mapping.root: mapping.preferred_linkage
-        for group in link_groups
-        for mapping in group.mappings
-        if mapping.root != None and mapping.preferred_linkage != None
-    }
+    root_to_linkage = {}
+    for group in link_groups:
+        for mapping in group.mappings:
+            if not mapping.roots:
+                continue
+
+            if not mapping.preferred_linkage:
+                continue
+
+            for root in mapping.roots:
+                # TODO: There might be a bug here - if the same root is listed in
+                # two different link_group_map entries, we'll only use the
+                # preferred_linkage of the last mapping processed.
+                root_to_linkage[root] = mapping.preferred_linkage
+
+    return root_to_linkage
 
 LinkGroupContext = record(
     link_group_mappings = field([dict[Label, str], None]),
@@ -235,6 +256,8 @@ LinkGroupContext = record(
     link_group_libs = field(dict[str, typing.Any]),
     link_group_preferred_linkage = field(dict[Label, Linkage]),
     labels_to_links_map = field(dict[Label, LinkGroupLinkInfo]),
+    # Mapping from a target to the name of the link group it was linked into.
+    targets_consumed_by_link_groups = field(dict[Label, str]),
 )
 
 def is_link_group_shlib(
@@ -263,7 +286,8 @@ def _transitively_update_shared_linkage(
         link_strategy: LinkStrategy,
         link_group_preferred_linkage: dict[Label, Linkage],
         link_group_roots: dict[Label, str],
-        pic_behavior: PicBehavior):
+        pic_behavior: PicBehavior,
+        link_group_mappings: [dict[Label, str], None]):
     # Identify targets whose shared linkage style may be propagated to
     # dependencies. Implicitly created root libraries are skipped.
     shared_lib_roots = []
@@ -279,19 +303,50 @@ def _transitively_update_shared_linkage(
         shared_lib_roots.append(target)
 
     # buildifier: disable=uninitialized
-    def process_dependency(node: Label) -> list[Label]:
+    def process_dependency(node: Label) -> list[Label] | None:
+        if link_group_mappings and link_group_mappings.get(node) == NO_MATCH_LABEL:
+            # Do not propagate shared linkage via nodes that are excluded from link groups.
+            return None
         linkable_node = linkable_graph_node_map[node]
         if linkable_node.preferred_linkage == Linkage("any"):
             link_group_preferred_linkage[node] = Linkage("shared")
         return get_deps_for_link(linkable_node, link_strategy, pic_behavior)
 
-    breadth_first_traversal_by(
+    depth_first_traversal_by(
         linkable_graph_node_map,
         shared_lib_roots,
        process_dependency,
     )
 
+def create_debug_linkable_entries(
+        labels_to_links_map: dict[Label, LinkGroupLinkInfo],
+        root: Label | None) -> list[LinkGroupsDebugLinkableEntry]:
+    entries = []
+    if root:
+        root_entry = LinkGroupsDebugLinkableEntry(
+            name = root,
+            output_style = LibOutputStyle("pic_archive"),
+        )
+        entries.append(root_entry)
+
+    for link_info in labels_to_links_map.values():
+        link_groups_linkable_info = LinkGroupsDebugLinkableEntry(
+            name = link_info.link_name,
+            output_style = link_info.output_style,
+        )
+        entries.append(link_groups_linkable_info)
+
+    return entries
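`create_debug_linkable_entries` flattens the final label-to-link map into the ordered rows consumed by the debug-info subtarget: an explicit root contributes a `pic_archive` row first, and every mapped label then mirrors the output style it was linked with. Roughly, the result has this shape (labels and styles illustrative, not from this diff):

    [
        LinkGroupsDebugLinkableEntry(name = root_label, output_style = LibOutputStyle("pic_archive")),
        LinkGroupsDebugLinkableEntry(name = dep_label, output_style = LibOutputStyle("shared_lib")),
    ]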
+# Stores the final information about the link arguments
+# that map to the linker argsfile for a link group or the final binary.
+FinalLabelsToLinks = record(
+    # Static archive and shared library inputs.
+    map = field(dict[Label, LinkGroupLinkInfo]),
+)
+
 def get_filtered_labels_to_links_map(
+        public_nodes: [set_record, None],
         linkable_graph_node_map: dict[Label, LinkableNode],
         link_group: [str, None],
         link_groups: dict[str, Group],
@@ -303,7 +358,8 @@ def get_filtered_labels_to_links_map(
         link_group_libs: dict[str, ([Label, None], LinkInfos)] = {},
         prefer_stripped: bool = False,
         is_executable_link: bool = False,
-        force_static_follows_dependents: bool = True) -> dict[Label, LinkGroupLinkInfo]:
+        force_static_follows_dependents: bool = True,
+        prefer_optimized = False) -> FinalLabelsToLinks:
     """
     Given a linkable graph, link style and link group mappings, finds all
     links to consider for linking, traversing the graph as necessary, and then
@@ -312,29 +368,18 @@ def get_filtered_labels_to_links_map(
     """
 
     def get_potential_linkables(node: Label) -> list[Label]:
-        linkable_node = linkable_graph_node_map[node]  # buildifier: disable=uninitialized
-
-        # Always link against exported deps
-        node_linkables = list(linkable_node.exported_deps)
+        # buildifier: disable=uninitialized
+        linkable_node = linkable_graph_node_map[node]
 
         # If the preferred linkage is `static` or `any` we need to link against the deps too.
-        # TODO(cjhopman): This code originally was as commented out below and the comment indicated that the
-        # intent was to not traverse in the second case if link style was shared, but at this point idk which
-        # behavior we actually want.
-        should_traverse_private_deps = False
-        if linkable_node.preferred_linkage == Linkage("static"):
-            should_traverse_private_deps = True
-        elif linkable_node.preferred_linkage == Linkage("any"):
-            should_traverse_private_deps = True
-            # should_traverse = link_style != Linkage("shared")
+        should_traverse_private_deps = linkable_node.preferred_linkage == Linkage("static") or linkable_node.preferred_linkage == Linkage("any")
 
         if should_traverse_private_deps:
-            node_linkables += linkable_node.deps
-
-        return node_linkables
+            return linkable_node.all_deps
+        else:
+            return linkable_node.exported_deps
 
     # Get all potential linkable targets
-    linkables = breadth_first_traversal_by(
+    linkables = depth_first_traversal_by(
         linkable_graph_node_map,
         roots,
         get_potential_linkables,
@@ -357,6 +402,7 @@ def get_filtered_labels_to_links_map(
         link_group_preferred_linkage,
         link_group_roots,
         pic_behavior,
+        link_group_mappings,
     )
 
     linkable_map = {}
@@ -365,15 +411,36 @@ def get_filtered_labels_to_links_map(
     # already. This avoids us adding the same link group lib multiple times,
     # once for each of the multiple nodes that map to it.
     link_group_added = {}
+    group_srcs = {}
 
     def add_link(target: Label, output_style: LibOutputStyle):
         linkable_map[target] = LinkGroupLinkInfo(
-            link_info = get_link_info(linkable_graph_node_map[target], output_style, prefer_stripped),
+            link_info = get_link_info(linkable_graph_node_map[target], output_style, prefer_stripped, prefer_optimized),
             output_style = output_style,
-        )  # buildifier: disable=uninitialized
+            link_name = target,
+        )
 
     def add_link_group(target: Label, target_group: str):
         # If we've already added this link group to the link line, we're done.
+
+        link_group_spec = link_groups.get(target_group, None)
+        if link_group_spec and link_group_spec.attrs.prohibit_file_duplicates and public_nodes and public_nodes.contains(target):
+            if target_group not in group_srcs:
+                group_srcs[target_group] = {}
+            target_group_srcs = group_srcs[target_group]
+            for src in linkable_graph_node_map[target].srcs:
+                if not isinstance(src, Artifact):
+                    # "src" is either a source file or a source file plus a list of compilation flags.
+                    # We do not handle the case where compilation flags are attached to source files,
+                    # because we don't know whether the link is going to fail or not; we let the user
+                    # deal with linker errors if there are any.
+                    continue
+
+                previous_target = target_group_srcs.get(src, None)
+                if previous_target and previous_target != target:
+                    fail("'{}' artifact included multiple times into '{}' link group. From '{}:{}' and '{}:{}'".format(src, target_group, target.package, target.name, previous_target.package, previous_target.name))
+                else:
+                    target_group_srcs[src] = target
+
         if target_group in link_group_added:
             return
 
@@ -392,50 +459,81 @@ def get_filtered_labels_to_links_map(
         linkable_map[target] = LinkGroupLinkInfo(
             link_info = get_link_info_from_link_infos(shared_link_infos),
             output_style = LibOutputStyle("shared_lib"),
-        )  # buildifier: disable=uninitialized
+            link_name = target_group,
+        )
 
     filtered_groups = [None, NO_MATCH_LABEL, MATCH_ALL_LABEL]
 
     for target in linkables:
         node = linkable_graph_node_map[target]
+        target_link_group = link_group_mappings.get(target)
+
         output_style = get_lib_output_style(link_strategy, link_group_preferred_linkage.get(target, node.preferred_linkage), pic_behavior)
+        output_style_for_static_strategy = get_lib_output_style(LinkStrategy("static_pic"), link_group_preferred_linkage.get(target, node.preferred_linkage), pic_behavior)
+        is_forced_shared_linkage = output_style_for_static_strategy == LibOutputStyle("shared_lib")
 
-        # Always link any shared dependencies
-        if output_style == LibOutputStyle("shared_lib"):
+        # We should always add force-static libs to the link.
+        is_force_static_lib = force_static_follows_dependents and node.preferred_linkage == Linkage("static") and not node.ignore_force_static_follows_dependents
+
+        # Whether this target belongs to the match-all link group or to the group currently being evaluated
+        matches_current_link_group = target_link_group == MATCH_ALL_LABEL or target_link_group == link_group
+
+        if is_forced_shared_linkage:
             # filter out any dependencies to be discarded
-            group = link_groups.get(link_group_mappings.get(target))
+            group = link_groups.get(target_link_group)
             if group != None and group.attrs.discard_group:
                 continue
 
             # If this target is a link group root library, we
             # 1) don't propagate shared linkage down the tree, and
             # 2) use the provided link info in lieu of what's in the graph.
-            target_link_group = link_group_roots.get(target)
-            if target_link_group != None and target_link_group != link_group:
-                add_link_group(target, target_link_group)
+            root_link_group = link_group_roots.get(target)
+            if root_link_group != None and root_link_group != link_group:
+                add_link_group(target, root_link_group)
             else:
                 add_link(target, LibOutputStyle("shared_lib"))
-        else:  # static or static_pic
-            target_link_group = link_group_mappings.get(target)
-
-            # Always add force-static libs to the link.
-            if (force_static_follows_dependents and
-                    node.preferred_linkage == Linkage("static") and
-                    not node.ignore_force_static_follows_dependents):
-                add_link(target, output_style)
-            elif not target_link_group and not link_group:
-                # Ungrouped linkable targets belong to the unlabeled executable
-                add_link(target, output_style)
-            elif is_executable_link and target_link_group == NO_MATCH_LABEL:
-                # Targets labeled NO_MATCH belong to the unlabeled executable
-                add_link(target, output_style)
-            elif target_link_group == MATCH_ALL_LABEL or target_link_group == link_group:
-                # If this belongs to the match all link group or the group currently being evaluated
-                add_link(target, output_style)
-            elif target_link_group not in filtered_groups:
-                add_link_group(target, target_link_group)
-
-    return linkable_map
+
+        else:
+            # The shared and static linkage branches are similar, but kept separate
+            # for clarity and ease of debugging.
+            if link_strategy == LinkStrategy("shared"):
+                if (target_link_group and matches_current_link_group) or is_force_static_lib:
+                    # The target is linked statically if:
+                    # 1. It belongs to the current link group (unique symbols across the graph)
+                    # 2. It matches all link groups (can duplicate symbols across the graph)
+                    # 3. It forces static linkage (can duplicate symbols across the graph)
+                    add_link(target, output_style_for_static_strategy)
+
+                elif not target_link_group or target_link_group == NO_MATCH_LABEL:
+                    # The target is linked dynamically, directly, if:
+                    # 1. It doesn't belong to any link group
+                    # 2. It belongs to the NO_MATCH group
+                    add_link(target, output_style)
+
+                elif target_link_group not in filtered_groups:
+                    # The target is linked dynamically, through another link group, if:
+                    # 1. It matches another link group
+                    add_link_group(target, target_link_group)
+
+            else:  # static or static_pic
+                # Always add force-static libs to the link.
+                if is_force_static_lib:
+                    add_link(target, output_style)
+                elif not target_link_group and not link_group:
+                    # Ungrouped linkable targets belong to the unlabeled executable
+                    add_link(target, output_style)
+                elif is_executable_link and target_link_group == NO_MATCH_LABEL:
+                    # Targets labeled NO_MATCH belong to the unlabeled executable
+                    add_link(target, output_style)
+                elif matches_current_link_group:
+                    # This target belongs to the match-all link group or the group currently being evaluated
+                    add_link(target, output_style)
+                elif target_link_group not in filtered_groups:
+                    add_link_group(target, target_link_group)
+
+    return FinalLabelsToLinks(
+        map = linkable_map,
+    )
 
 # Find all link group libraries that are first-order deps or exported deps of
 # the executable or another link group's libs
@@ -498,7 +596,7 @@ def get_public_link_group_nodes(
 
     external_link_group_nodes.update(
         # get transitive exported deps
-        breadth_first_traversal_by(
+        depth_first_traversal_by(
             linkable_graph_node_map,
             external_link_group_nodes.list(),
             discover_link_group_linkables,
@@ -528,6 +626,46 @@ def get_link_group_map_json(ctx: AnalysisContext, targets: list[TargetLabel]) ->
     json_map = ctx.actions.write_json(LINK_GROUP_MAP_DATABASE_FILENAME, sorted(targets))
     return DefaultInfo(default_output = json_map)
 
+def _find_all_relevant_roots(
+        specs: list[LinkGroupLibSpec],
+        link_group_mappings: dict[Label, str],  # target label to link group name
+        roots: list[Label],
+        linkable_graph_node_map: dict[Label, LinkableNode]) -> dict[str, list[Label]]:
+    relevant_roots = {}
+    link_groups_for_full_traversal = set()  # set[str]
+
+    for spec in specs:
+        if spec.root != None:
+            relevant_roots[spec.group.name] = spec.root.deps
+        else:
+            roots_from_mappings, has_empty_root = _get_roots_from_mappings(spec, linkable_graph_node_map)
+            relevant_roots[spec.group.name] = roots_from_mappings
+            if has_empty_root:
+                link_groups_for_full_traversal.add(spec.group.name)
+
+    def collect_and_traverse_roots(node_target: Label) -> list[Label]:
+        node = linkable_graph_node_map.get(node_target)
+        if node.preferred_linkage == Linkage("static") and not node.ignore_force_static_follows_dependents:
+            return node.all_deps
+
+        node_link_group = link_group_mappings.get(node_target)
+
+        if node_link_group == MATCH_ALL_LABEL:
+            # Add the node to the list of roots for all link groups
+            for link_group in relevant_roots.keys():
+                relevant_roots[link_group].append(node_target)
+        elif link_groups_for_full_traversal.contains(node_link_group) and node_link_group != NO_MATCH_LABEL:
+            relevant_roots[node_link_group].append(node_target)
+        return node.all_deps
+
+    depth_first_traversal_by(
+        linkable_graph_node_map,
+        roots,
+        collect_and_traverse_roots,
+    )
+
+    return relevant_roots
+
 def find_relevant_roots(
         link_group: [str, None] = None,
         linkable_graph_node_map: dict[Label, LinkableNode] = {},
@@ -535,22 +673,26 @@ def find_relevant_roots(
         roots: list[Label] = []):
     # Walk through the roots looking for the first node which maps to the current
     # link group.
-    def collect_and_traverse_roots(roots, node_target):
+
+    def collect_and_traverse_roots(roots, node_target: Label) -> list[Label] | None:
         node = linkable_graph_node_map.get(node_target)
         if node.preferred_linkage == Linkage("static") and not node.ignore_force_static_follows_dependents:
-            return node.deps + node.exported_deps
+            return node.all_deps
+
         node_link_group = link_group_mappings.get(node_target)
+
         if node_link_group == MATCH_ALL_LABEL:
             roots.append(node_target)
-            return []
-        if node_link_group == link_group:
+        elif node_link_group == link_group:
             roots.append(node_target)
-            return []
-        return node.deps + node.exported_deps
+        else:
+            return node.all_deps
+
+        return None
 
     relevant_roots = []
 
-    breadth_first_traversal_by(
+    depth_first_traversal_by(
         linkable_graph_node_map,
         roots,
         partial(collect_and_traverse_roots, relevant_roots),
@@ -558,25 +700,46 @@ def find_relevant_roots(
 
     return relevant_roots
 
+def _get_roots_from_mappings(
+        spec: LinkGroupLibSpec,
+        linkable_graph_node_map: dict[Label, LinkableNode]) -> (list[Label], bool):
+    roots = []
+    has_empty_root = False
+    for mapping in spec.group.mappings:
+        # If there's no explicit root, this means we need to search the entire
+        # graph to find candidate nodes.
+        if not mapping.roots:
+            has_empty_root = True
+        elif spec.group.attrs.requires_root_node_exists:
+            # If the spec requires roots to always exist (default: True), always include
+            # them in the traversal, so that we fail hard if a root is not in the deps.
+            # Otherwise, add a root to the traversal only if we are sure it is in the deps graph.
+            roots.extend(mapping.roots)
+        else:
+            roots.extend([root for root in mapping.roots if root in linkable_graph_node_map])
+    return (roots, has_empty_root)
+
+_CreatedLinkGroup = record(
+    linked_object = field(LinkedObject),
+    labels_to_links = field(FinalLabelsToLinks),
+)
+
 def _create_link_group(
         ctx: AnalysisContext,
         spec: LinkGroupLibSpec,
-        # The deps of the top-level executable.
-        executable_deps: list[Label] = [],
-        # Additional roots involved in the link.
-        other_roots: list[Label] = [],
+        roots: list[Label],
+        link_strategy: LinkStrategy,
         public_nodes: set_record = set(),
         linkable_graph_node_map: dict[Label, LinkableNode] = {},
         linker_flags: list[typing.Any] = [],
         link_groups: dict[str, Group] = {},
         link_group_mappings: dict[Label, str] = {},
         link_group_preferred_linkage: dict[Label, Linkage] = {},
-        link_strategy: LinkStrategy = LinkStrategy("static_pic"),
         link_group_libs: dict[str, ([Label, None], LinkInfos)] = {},
         prefer_stripped_objects: bool = False,
         category_suffix: [str, None] = None,
         anonymous: bool = False,
-        allow_cache_upload = False) -> [LinkedObject, None]:
+        allow_cache_upload = False,
+        error_handler = None) -> _CreatedLinkGroup | None:
     """
     Link a link group library, described by a `LinkGroupLibSpec`. This is
     intended to handle regular shared libs and e.g. Python extensions.
@@ -596,10 +759,6 @@ def _create_link_group(
         get_ignore_undefined_symbols_flags(linker_type),
     ))
 
-    # Get roots to begin the linkable search.
-    # TODO(agallagher): We should use the groups "public" nodes as the roots.
-    roots = []
-    has_empty_root = False
     if spec.root != None:
         # If there's a linkable root attached to the spec, use that to guide
         # linking, as that will contain things like private linker flags that
@@ -608,33 +767,10 @@ def _create_link_group(
             spec.root.link_infos,
             prefer_stripped = prefer_stripped_objects,
         ))
-        roots.extend(spec.root.deps)
-    else:
-        for mapping in spec.group.mappings:
-            # If there's no explicit root, this means we need to search the entire
-            # graph to find candidate nodes.
-            if mapping.root == None:
-                has_empty_root = True
-            elif spec.group.attrs.requires_root_node_exists or mapping.root in linkable_graph_node_map:
-                # If spec requires root to always exist (default True), always include to traversal to fail hard if it is not in deps.
-                # Otherwise add to traversal only if we sure it is in deps graph.
-                roots.append(mapping.root)
-
-        # If this link group has an empty mapping, we need to search everything
-        # -- even the additional roots -- to find potential nodes in the link
-        # group.
-        if has_empty_root:
-            roots.extend(
-                find_relevant_roots(
-                    link_group = spec.group.name,
-                    linkable_graph_node_map = linkable_graph_node_map,
-                    link_group_mappings = link_group_mappings,
-                    roots = executable_deps + other_roots,
-                ),
-            )
 
     # Add roots...
-    filtered_labels_to_links_map = get_filtered_labels_to_links_map(
+    filtered_labels_to_links = get_filtered_labels_to_links_map(
+        public_nodes,
         linkable_graph_node_map,
         spec.group.name,
         link_groups,
@@ -646,10 +782,11 @@ def _create_link_group(
         roots = roots,
         is_executable_link = False,
         prefer_stripped = prefer_stripped_objects,
+        prefer_optimized = spec.group.attrs.prefer_optimized_experimental,
     )
-    inputs.extend(get_filtered_links(filtered_labels_to_links_map, public_nodes))
+    inputs.extend(get_filtered_links(filtered_labels_to_links.map, public_nodes))
 
-    if not filtered_labels_to_links_map and not spec.root:
+    if not filtered_labels_to_links.map and not spec.root:
         # don't create empty shared libraries
         return None
 
@@ -666,10 +803,14 @@ def _create_link_group(
             enable_distributed_thinlto = False if anonymous else spec.group.attrs.enable_distributed_thinlto,
             link_execution_preference = LinkExecutionPreference("any"),
             allow_cache_upload = allow_cache_upload,
+            error_handler = error_handler,
         ),
         anonymous = anonymous,
     )
-    return link_result.linked_object
+    return _CreatedLinkGroup(
+        linked_object = link_result.linked_object,
+        labels_to_links = filtered_labels_to_links,
+    )
 
 def _stub_library(
         ctx: AnalysisContext,
@@ -776,18 +917,24 @@ def _symbol_flags_for_link_groups(
 
 def create_link_groups(
         ctx: AnalysisContext,
+        public_nodes: set_record,
+        link_strategy: LinkStrategy,
         link_groups: dict[str, Group] = {},
         link_group_specs: list[LinkGroupLibSpec] = [],
         executable_deps: list[Label] = [],
        other_roots: list[Label] = [],
-        root_link_group: [str, None] = None,
         linker_flags: list[typing.Any] = [],
         prefer_stripped_objects: bool = False,
        linkable_graph_node_map: dict[Label, LinkableNode] = {},
        link_group_preferred_linkage: dict[Label, Linkage] = {},
        link_group_mappings: [dict[Label, str], None] = None,
        anonymous: bool = False,
-        allow_cache_upload = False) -> _LinkedLinkGroups:
+        allow_cache_upload = False,
+        error_handler: [typing.Callable, None] = None) -> _LinkedLinkGroups:
+    # We are linking libraries here, so we need PIC.
+    if link_strategy == LinkStrategy("static"):
+        link_strategy = LinkStrategy("static_pic")
+
     # Generate stubs first, so that subsequent links can link against them.
link_group_shared_links = {}
     specs = []
 
@@ -806,26 +953,27 @@ def create_link_groups(
             anonymous = anonymous,
         )
 
+    targets_consumed_by_link_groups = {}
     linked_link_groups = {}
+    link_groups_debug_info = {}
     undefined_symfiles = []
     global_symfiles = []
-
-    public_nodes = get_public_link_group_nodes(
-        linkable_graph_node_map,
+    roots = _find_all_relevant_roots(
+        specs,
         link_group_mappings,
         executable_deps + other_roots,
-        root_link_group,
+        linkable_graph_node_map,
     )
 
     for link_group_spec in specs:
         # NOTE(agallagher): It might make sense to move this down to be
         # done when we generate the links for the executable, so we can
         # handle the case when a link group can depend on the executable.
-        link_group_lib = _create_link_group(
+        created_link_group = _create_link_group(
             ctx = ctx,
             spec = link_group_spec,
-            executable_deps = executable_deps,
-            other_roots = other_roots,
+            roots = roots[link_group_spec.group.name],
+            link_strategy = link_strategy,
             linkable_graph_node_map = linkable_graph_node_map,
             public_nodes = public_nodes,
             linker_flags = (
@@ -846,12 +994,29 @@ def create_link_groups(
             category_suffix = "link_group",
             anonymous = anonymous,
             allow_cache_upload = allow_cache_upload,
+            error_handler = error_handler,
         )
 
-        if link_group_lib == None:
+        if created_link_group == None:
             # The link group did not match anything; don't create a shlib interface.
             continue
 
+        link_group_lib = created_link_group.linked_object
+
+        root_label = link_group_spec.root.label if link_group_spec.root else None
+        link_groups_debug_info[link_group_spec.name] = LinkGroupsDebugLinkableItem(
+            ordered_linkables = create_debug_linkable_entries(created_link_group.labels_to_links.map, root_label),
+        )
+
+        for (linked_target, link_info) in created_link_group.labels_to_links.map.items():
+            if link_info.output_style != LibOutputStyle("shared_lib"):
+                # Remember all targets that were statically linked into the link group
+                targets_consumed_by_link_groups[linked_target] = link_group_spec.group.name
+
+        if link_group_spec.root:
+            # If the link group has a root, it is always linked statically
+            targets_consumed_by_link_groups[link_group_spec.root.label] = link_group_spec.group.name
+
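Recording `targets_consumed_by_link_groups` here is what later lets a dynamic link substitute a stub for a library that was statically folded into a link group: in `build_shared_libs_for_symlink_tree` (further down in this file), any shared lib whose label appears in this map is replaced by a soname-matching entry pointing at the group's .so. Condensed, the consumer side reads (where `consumed` abbreviates the recorded map):

    if link_strategy == LinkStrategy("shared") and shlib.label in consumed:
        # Re-expose the group .so under the consumed target's original soname.
        add_shib(create_link_group_link(link_group_libs[consumed[shlib.label]], shlib))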
        # On GNU, use shlib interfaces.
        if cxx_is_gnu(ctx):
            shlib_for_link = shared_library_interface(
@@ -868,7 +1033,15 @@ def create_link_groups(
         linked_link_groups[link_group_spec.group.name] = _LinkedLinkGroup(
             artifact = link_group_lib,
             library = None if not link_group_spec.is_shared_lib else LinkGroupLib(
-                shared_libs = {link_group_spec.name: link_group_lib},
+                shared_libs = SharedLibraries(
+                    libraries = [
+                        create_shlib(
+                            label = link_group_spec.label or ctx.label,
+                            soname = link_group_spec.name,
+                            lib = link_group_lib,
+                        ),
+                    ],
+                ),
                 shared_link_infos = LinkInfos(
                     default = wrap_link_info(
                         link_info,
@@ -904,6 +1077,8 @@ def create_link_groups(
     return _LinkedLinkGroups(
         libs = linked_link_groups,
         symbol_ldflags = symbol_ldflags,
+        libs_debug_info = link_groups_debug_info,
+        targets_consumed_by_link_groups = targets_consumed_by_link_groups,
     )
 
 def get_transitive_deps_matching_labels(
@@ -921,3 +1096,87 @@ def get_transitive_deps_matching_labels(
             continue
         nodes.append(dep)
     return nodes
+
+def build_shared_libs_for_symlink_tree(
+        use_link_groups: bool,
+        link_group_ctx: LinkGroupContext,
+        link_strategy: LinkStrategy,
+        shared_libraries: list[SharedLibrary],
+        extra_shared_libraries: list[SharedLibrary]) -> list[SharedLibrary]:
+    # Tracks which targets we actually materialized as symlinks to link groups
+    added_link_group_symlinks_libs = set()
+    symlink_tree_shared_libraries = []
+
+    def is_shlib_added(soname: Soname) -> bool:
+        return soname.is_str and added_link_group_symlinks_libs.contains(soname.ensure_str())
+
+    def add_shib(shlib: SharedLibrary):
+        if shlib.soname.is_str:
+            added_link_group_symlinks_libs.add(shlib.soname.ensure_str())
+        symlink_tree_shared_libraries.append(shlib)
+
+    if use_link_groups:
+        # When there are no matches for a pattern-based link group,
+        # `link_group_mappings` will not have an entry associated with the lib.
+        for _name, link_group_lib in link_group_ctx.link_group_libs.items():
+            for link_group_shlib in link_group_lib.shared_libs.libraries:
+                add_shib(link_group_shlib)
+
+    for shlib in shared_libraries:
+        if is_shlib_added(shlib.soname):
+            # The shlib was already materialized as a link group.
+            # This may happen if a link group spec had this target as a root;
+            # that produces a link group with exactly this .so file, and the
+            # dynamic linker will be satisfied.
+            continue
+
+        if link_strategy == LinkStrategy("shared") and shlib.label in link_group_ctx.targets_consumed_by_link_groups:
+            link_group_link = create_link_group_link(
+                link_group_ctx.link_group_libs[link_group_ctx.targets_consumed_by_link_groups[shlib.label]],
+                shlib,
+            )
+            add_shib(link_group_link)
+
+        elif not use_link_groups or is_link_group_shlib(shlib.label, link_group_ctx):
+            add_shib(shlib)
+
+    # Add in extra, rule-specific shared libs.
+    for extra_shlib in extra_shared_libraries:
+        if not is_shlib_added(extra_shlib.soname):
+            add_shib(extra_shlib)
+
+    return symlink_tree_shared_libraries
+
+def create_link_group_link(
+        link_group_lib: LinkGroupLib,
+        consumed_library: SharedLibrary) -> SharedLibrary:
+    """
+    This method implements symlinking from the original .so to the link group .so
+    for link groups under **dynamic linking**. The problem: given the following setup
+    ```
+    :bin
+    |  |
+    :A :C
+    |  │
+    └ :B ┘
+    ```
+
+    If we put `:A` and `:B` into a link group, `lib_c.so` will still add `lib_b.so` to its `NEEDED` section.
+    But `lib_b.so` is going to be grouped into `lib_a_b_lg.so`, and there is no way to propagate this information to `lib_c.so`.
+    However, we can create "stubs" for `lib_a.so` and `lib_b.so` that all point to the actual `lib_a_b_lg.so`.
+    This approach satisfies the dynamic linker.
+    """
+
+    if len(link_group_lib.shared_libs.libraries) != 1:
+        fail("This method should only be used with auto link groups that produce exactly one shared library")
+    link_group_shlib = link_group_lib.shared_libs.libraries[0]
+
+    return create_shlib(
+        lib = link_group_shlib.lib,
+        link_args = link_group_shlib.link_args,
+        shlib_deps = link_group_shlib.shlib_deps,
+        can_be_asset = link_group_shlib.can_be_asset,
+        soname = consumed_library.soname,  # <=== keep the original target's soname, which will symlink to the link group
+        label = consumed_library.label,
+    )
diff --git a/prelude/cxx/link_groups_types.bzl b/prelude/cxx/link_groups_types.bzl
new file mode 100644
index 00000000000..7c0037e5e5e
--- /dev/null
+++ b/prelude/cxx/link_groups_types.bzl
@@ -0,0 +1,87 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+#
+# This source code is licensed under both the MIT license found in the
+# LICENSE-MIT file in the root directory of this source tree and the Apache
+# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
+# of this source tree.
+
+load(
+    "@prelude//linking:link_info.bzl",
+    "LibOutputStyle",
+)
+load("@prelude//linking:types.bzl", "Linkage")
+load(":groups_types.bzl", "Group", "Traversal")
+
+# These are targets or link groups that will be added to the linker argsfile.
+# Targets will be expanded to .o files; link groups will be added as NEEDED entries.
+LinkGroupsDebugLinkableEntry = record(
+    name = field(Label | str),
+    output_style = field(LibOutputStyle),
+)
+
+# Info about a single output unit: either the final binary or one of the
+# link groups.
+LinkGroupsDebugLinkableItem = record(
+    ordered_linkables = field(list[LinkGroupsDebugLinkableEntry]),
+)
+
+LinkGroupsDebugLinkInfo = record(
+    binary = field(LinkGroupsDebugLinkableItem),
+    libs = field(dict[str, LinkGroupsDebugLinkableItem]),
+)
+
+LinkGroupInfo = provider(
+    fields = {
+        # Additional graphs needed to cover labels referenced by the groups above.
+        # This is useful in cases where the consumer of this provider won't already
+        # have deps covering these.
+        # NOTE(agallagher): We do this to maintain existing behavior w/ the
+        # standalone `link_group_map()` rule, but it's not clear if it's actually
+        # desirable behavior.
+        "graph": provider_field(typing.Any, default = None),  # LinkableGraph
+        "groups": provider_field(dict[str, Group]),
+        "groups_hash": provider_field(int),
+        "mappings": provider_field(dict[Label, str]),
+    },
+)
+
+def link_group_inlined_map_attr(root_attr):
+    return attrs.list(
+        attrs.tuple(
+            # name
+            attrs.string(),
+            # list of mappings
+            attrs.list(
+                # a single mapping
+                attrs.tuple(
+                    # root node
+                    attrs.one_of(root_attr, attrs.list(root_attr)),
+                    # traversal
+                    attrs.enum(Traversal.values()),
+                    # filters: either `None`, a single filter, or a list of filters
+                    # (which must all match).
+                    attrs.option(attrs.one_of(attrs.list(attrs.string()), attrs.string())),
+                    # linkage
+                    attrs.option(attrs.enum(Linkage.values())),
+                ),
+            ),
+            # attributes
+            attrs.option(
+                attrs.dict(key = attrs.string(), value = attrs.any(), sorted = False),
+            ),
+        ),
+    )
+
+LINK_GROUP_MAP_ATTR = attrs.option(
+    attrs.one_of(
+        attrs.dep(providers = [LinkGroupInfo]),
+        link_group_inlined_map_attr(
+            # An inlined `link_group_map` will parse roots as `label`s, to avoid
+            # bloating deps w/ unrelated mappings (e.g. it's common to use
+            # a default mapping for all rules, which would otherwise add
+            # unrelated deps to them).
+ root_attr = attrs.option(attrs.label()), + ), + ), + default = None, +) diff --git a/prelude/cxx/link_types.bzl b/prelude/cxx/link_types.bzl index ddcac7c52bf..345e5d94cd4 100644 --- a/prelude/cxx/link_types.bzl +++ b/prelude/cxx/link_types.bzl @@ -32,11 +32,12 @@ LinkOptions = record( strip = bool, # A function/lambda which will generate the strip args using the ctx. strip_args_factory = [typing.Callable, None], - import_library = [Artifact, None], + import_library = Artifact | None, allow_cache_upload = bool, cxx_toolchain = [CxxToolchainInfo, None], # Force callers to use link_options() or merge_link_options() to create. __private_use_link_options_function_to_construct = None, + error_handler = [typing.Callable, None], ) def link_options( @@ -49,9 +50,10 @@ def link_options( identifier: [str, None] = None, strip: bool = False, strip_args_factory = None, - import_library: [Artifact, None] = None, + import_library: Artifact | None = None, allow_cache_upload: bool = False, - cxx_toolchain: [CxxToolchainInfo, None] = None) -> LinkOptions: + cxx_toolchain: [CxxToolchainInfo, None] = None, + error_handler: [typing.Callable, None] = None) -> LinkOptions: """ A type-checked constructor for LinkOptions because by default record constructors aren't typed. @@ -70,9 +72,10 @@ def link_options( allow_cache_upload = allow_cache_upload, cxx_toolchain = cxx_toolchain, __private_use_link_options_function_to_construct = None, + error_handler = error_handler, ) -# A marker instance to differentiate explicitly-passed None and a field tha +# A marker instance to differentiate explicitly-passed None and a field that # isn't provided in merge_link_options. _NotProvided = record() _NOT_PROVIDED = _NotProvided() @@ -110,4 +113,5 @@ def merge_link_options( allow_cache_upload = base.allow_cache_upload if allow_cache_upload == _NOT_PROVIDED else allow_cache_upload, cxx_toolchain = base.cxx_toolchain if cxx_toolchain == _NOT_PROVIDED else cxx_toolchain, __private_use_link_options_function_to_construct = None, + error_handler = base.error_handler, ) diff --git a/prelude/cxx/linker.bzl b/prelude/cxx/linker.bzl index 3bf86d3be5f..09ec0eea0c6 100644 --- a/prelude/cxx/linker.bzl +++ b/prelude/cxx/linker.bzl @@ -5,7 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
-load("@prelude//cxx:cxx_toolchain_types.bzl", "LinkerInfo") +load("@prelude//cxx:cxx_toolchain_types.bzl", "LinkerInfo", "LinkerType") load("@prelude//utils:arglike.bzl", "ArgLike") load("@prelude//utils:expect.bzl", "expect") @@ -34,19 +34,19 @@ SharedLibraryFlagOverrides = record( ) LINKERS = { - "darwin": Linker( + LinkerType("darwin"): Linker( default_shared_library_extension = "dylib", default_shared_library_versioned_extension_format = "{}.dylib", shared_library_name_linker_flags_format = ["-install_name", "@rpath/{}"], shared_library_flags = ["-shared"], ), - "gnu": Linker( + LinkerType("gnu"): Linker( default_shared_library_extension = "so", default_shared_library_versioned_extension_format = "so.{}", shared_library_name_linker_flags_format = ["-Wl,-soname,{}"], shared_library_flags = ["-shared"], ), - "wasm": Linker( + LinkerType("wasm"): Linker( default_shared_library_extension = "wasm", default_shared_library_versioned_extension_format = "{}.wasm", shared_library_name_linker_flags_format = [], @@ -54,7 +54,7 @@ LINKERS = { # See https://github.com/WebAssembly/tool-conventions/blob/main/DynamicLinking.md#llvm-implementation shared_library_flags = ["-shared"], ), - "windows": Linker( + LinkerType("windows"): Linker( default_shared_library_extension = "dll", default_shared_library_versioned_extension_format = "dll", # NOTE(agallagher): I *think* windows doesn't support a flag to set the @@ -138,7 +138,7 @@ def get_default_shared_library_name(linker_info: LinkerInfo, label: Label): short_name = "{}_{}".format(_sanitize(label.package), _sanitize(label.name)) return get_shared_library_name(linker_info, short_name, apply_default_prefix = True) -def get_shared_library_name_linker_flags(linker_type: str, soname: str, flag_overrides: [SharedLibraryFlagOverrides, None] = None) -> list[str]: +def get_shared_library_name_linker_flags(linker_type: LinkerType, soname: str, flag_overrides: [SharedLibraryFlagOverrides, None] = None) -> list[str]: """ Arguments to pass to the linker to set the given soname. """ @@ -152,7 +152,7 @@ def get_shared_library_name_linker_flags(linker_type: str, soname: str, flag_ove for f in shared_library_name_linker_flags_format ] -def get_shared_library_flags(linker_type: str, flag_overrides: [SharedLibraryFlagOverrides, None] = None) -> list[ArgLike]: +def get_shared_library_flags(linker_type: LinkerType, flag_overrides: [SharedLibraryFlagOverrides, None] = None) -> list[ArgLike]: """ Arguments to pass to the linker to link a shared library. """ @@ -161,24 +161,24 @@ def get_shared_library_flags(linker_type: str, flag_overrides: [SharedLibraryFla return LINKERS[linker_type].shared_library_flags -def get_link_whole_args(linker_type: str, inputs: list[Artifact]) -> list[typing.Any]: +def get_link_whole_args(linker_type: LinkerType, inputs: list[Artifact]) -> list[typing.Any]: """ Return linker args used to always link all the given inputs. 
""" args = [] - if linker_type == "gnu": + if linker_type == LinkerType("gnu"): args.append("-Wl,--whole-archive") args.extend(inputs) args.append("-Wl,--no-whole-archive") - elif linker_type == "darwin": + elif linker_type == LinkerType("darwin"): for inp in inputs: args.append("-Xlinker") args.append("-force_load") args.append("-Xlinker") args.append(inp) - elif linker_type == "windows": + elif linker_type == LinkerType("windows"): for inp in inputs: args.append(inp) args.append("/WHOLEARCHIVE:" + inp.basename) @@ -187,42 +187,42 @@ def get_link_whole_args(linker_type: str, inputs: list[Artifact]) -> list[typing return args -def get_objects_as_library_args(linker_type: str, objects: list[Artifact]) -> list[typing.Any]: +def get_objects_as_library_args(linker_type: LinkerType, objects: list[Artifact]) -> list[typing.Any]: """ Return linker args used to link the given objects as a library. """ args = [] - if linker_type == "gnu": + if linker_type == LinkerType("gnu"): args.append("-Wl,--start-lib") args.extend(objects) args.append("-Wl,--end-lib") - elif linker_type == "windows": + elif linker_type == LinkerType("darwin") or linker_type == LinkerType("windows"): args.extend(objects) else: fail("Linker type {} not supported".format(linker_type)) return args -def get_ignore_undefined_symbols_flags(linker_type: str) -> list[str]: +def get_ignore_undefined_symbols_flags(linker_type: LinkerType) -> list[str]: """ Return linker args used to suppress undefined symbol errors. """ args = [] - if linker_type == "gnu": + if linker_type == LinkerType("gnu"): args.append("-Wl,--allow-shlib-undefined") args.append("-Wl,--unresolved-symbols=ignore-all") - elif linker_type == "darwin": - args.append("-Wl,-flat_namespace,-undefined,suppress") + elif linker_type == LinkerType("darwin"): + args.append("-Wl,-undefined,dynamic_lookup") else: fail("Linker type {} not supported".format(linker_type)) return args -def get_no_as_needed_shared_libs_flags(linker_type: str) -> list[str]: +def get_no_as_needed_shared_libs_flags(linker_type: LinkerType) -> list[str]: """ Return linker args used to prevent linkers from dropping unused shared library dependencies from the e.g. DT_NEEDED tags of the link. 
@@ -230,49 +230,59 @@ def get_no_as_needed_shared_libs_flags(linker_type: str) -> list[str]: args = [] - if linker_type == "gnu": + if linker_type == LinkerType("gnu"): args.append("-Wl,--no-as-needed") - elif linker_type == "darwin": + elif linker_type == LinkerType("darwin"): pass else: fail("Linker type {} not supported".format(linker_type)) return args -def get_output_flags(linker_type: str, output: Artifact) -> list[ArgLike]: - if linker_type == "windows": +def get_output_flags(linker_type: LinkerType, output: Artifact) -> list[ArgLike]: + if linker_type == LinkerType("windows"): return ["/Brepro", cmd_args(output.as_output(), format = "/OUT:{}")] else: return ["-o", output.as_output()] def get_import_library( ctx: AnalysisContext, - linker_type: str, - output_short_path: str) -> ([Artifact, None], list[ArgLike]): - if linker_type == "windows": + linker_type: LinkerType, + output_short_path: str) -> (Artifact | None, list[ArgLike]): + if linker_type == LinkerType("windows"): import_library = ctx.actions.declare_output(output_short_path + ".imp.lib") return import_library, [cmd_args(import_library.as_output(), format = "/IMPLIB:{}")] else: return None, [] +def get_deffile_flags( + ctx: AnalysisContext, + linker_type: LinkerType) -> list[ArgLike]: + if linker_type == LinkerType("windows") and ctx.attrs.deffile != None: + return [ + cmd_args(ctx.attrs.deffile, format = "/DEF:{}"), + ] + else: + return [] + def get_rpath_origin( - linker_type: str) -> str: + linker_type: LinkerType) -> str: """ Return the macro that runtime loaders resolve to the main executable at runtime. """ - if linker_type == "gnu": + if linker_type == LinkerType("gnu"): return "$ORIGIN" - if linker_type == "darwin": + if linker_type == LinkerType("darwin"): return "@loader_path" fail("Linker type {} not supported".format(linker_type)) def is_pdb_generated( - linker_type: str, + linker_type: LinkerType, linker_flags: list[[str, ResolvedStringWithMacros]]) -> bool: - if linker_type != "windows": + if linker_type != LinkerType("windows"): return False for flag in reversed(linker_flags): flag = str(flag).upper() diff --git a/prelude/cxx/omnibus.bzl b/prelude/cxx/omnibus.bzl index 1df12523f42..0de7f4b705a 100644 --- a/prelude/cxx/omnibus.bzl +++ b/prelude/cxx/omnibus.bzl @@ -6,7 +6,11 @@ # of this source tree. load("@prelude//:local_only.bzl", "get_resolved_cxx_binary_link_execution_preference") -load("@prelude//cxx:cxx_toolchain_types.bzl", "PicBehavior") +load( + "@prelude//cxx:cxx_toolchain_types.bzl", + "LinkerType", + "PicBehavior", +) load( "@prelude//cxx:link.bzl", "CxxLinkResult", # @unused Used as a type @@ -20,7 +24,6 @@ load( "LinkInfo", "LinkInfos", "LinkStrategy", - "Linkage", "LinkedObject", "SharedLibLinkable", "get_lib_output_style", @@ -38,10 +41,16 @@ load( "linkable_deps", "linkable_graph", ) +load( + "@prelude//linking:shared_libraries.bzl", + "SharedLibrary", # @unused Used as a type + "create_shlib", +) +load("@prelude//linking:types.bzl", "Linkage") load("@prelude//utils:expect.bzl", "expect") load( "@prelude//utils:graph_utils.bzl", - "breadth_first_traversal_by", + "depth_first_traversal_by", "post_order_traversal", ) load("@prelude//utils:utils.bzl", "flatten", "value_or") @@ -118,7 +127,7 @@ OmnibusRootProduct = record( # The result of the omnibus link. 
OmnibusSharedLibraries = record( omnibus = field([CxxLinkResult, None], None), - libraries = field(dict[str, LinkedObject], {}), + libraries = field(list[SharedLibrary], []), roots = field(dict[Label, OmnibusRootProduct], {}), exclusion_roots = field(list[Label]), excluded = field(list[Label]), @@ -139,7 +148,8 @@ def get_roots(deps: list[Dependency]) -> dict[Label, LinkableRootInfo]: roots = {} for dep in deps: if LinkableRootInfo in dep: - roots[dep.label] = dep[LinkableRootInfo] + root = dep[LinkableRootInfo] + roots[root.label] = root return roots def get_excluded(deps: list[Dependency] = []) -> dict[Label, None]: @@ -151,11 +161,13 @@ def get_excluded(deps: list[Dependency] = []) -> dict[Label, None]: return excluded_nodes def create_linkable_root( + label: Label, link_infos: LinkInfos, name: [str, None] = None, deps: list[LinkableGraph | Dependency] = []) -> LinkableRootInfo: # Only include dependencies that are linkable. return LinkableRootInfo( + label = label, name = name, link_infos = link_infos, deps = linkable_deps(deps), @@ -192,7 +204,7 @@ def _link_deps( def find_deps(node: Label): return get_deps_for_link(link_infos[node], LinkStrategy("shared"), pic_behavior) - return breadth_first_traversal_by(link_infos, deps, find_deps) + return depth_first_traversal_by(link_infos, deps, find_deps) def _create_root( ctx: AnalysisContext, @@ -288,10 +300,7 @@ def _create_root( links = [LinkArgs(flags = extra_ldflags), LinkArgs(infos = inputs)], category_suffix = "omnibus_root", identifier = root.name or output, - # We prefer local execution because there are lot of cxx_link_omnibus_root - # running simultaneously, so while their overall load is reasonable, - # their peak execution load is very high. - link_execution_preference = LinkExecutionPreference("local"), + link_execution_preference = LinkExecutionPreference("any"), allow_cache_upload = allow_cache_upload, ), ) @@ -505,16 +514,16 @@ def _create_omnibus( # Add global symbols version script. # FIXME(agallagher): Support global symbols for darwin. - if linker_info.type != "darwin": + if linker_info.type != LinkerType("darwin"): global_sym_vers = _create_global_symbols_version_script( ctx, # Extract symbols from roots... root_products.values(), # ... and the shared libs from excluded nodes. [ - shared_lib.output + shared_lib.lib.output for label in spec.excluded - for shared_lib in spec.link_infos[label].shared_libs.values() + for shared_lib in spec.link_infos[label].shared_libs.libraries ], # Extract explicit global symbol names from flags in all body link args. global_symbols_link_args, @@ -685,7 +694,7 @@ def create_omnibus_libraries( # Create dummy omnibus dummy_omnibus = create_dummy_omnibus(ctx, extra_ldflags) - libraries = {} + libraries = [] root_products = {} # Link all root nodes against the dummy libomnibus lib. @@ -704,7 +713,13 @@ def create_omnibus_libraries( allow_cache_upload = True, ) if root.name != None: - libraries[root.name] = product.shared_library + libraries.append( + create_shlib( + soname = root.name, + lib = product.shared_library, + label = label, + ), + ) root_products[label] = product # If we have body nodes, then link them into the monolithic libomnibus.so. 
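For context on `_create_global_symbols_version_script` above: a GNU ld version script lists the symbols to keep exported under `global:` and demotes everything else with `local: *`. The diff doesn't show the generator's exact output, so the sketch below is only an illustration of the standard ld syntax, with invented symbol names:

```
# Illustration of the version-script format fed via -Wl,--version-script.
def emit_version_script(global_symbols: list[str]) -> str:
    lines = ["{", "  global:"]
    lines += [f"    {sym};" for sym in sorted(global_symbols)]
    lines += ["  local: *;", "};"]
    return "\n".join(lines)

print(emit_version_script(["PyInit_foo", "my_plugin_entry"]))
# {
#   global:
#     PyInit_foo;
#     my_plugin_entry;
#   local: *;
# };
```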
@@ -719,12 +734,17 @@ def create_omnibus_libraries( prefer_stripped_objects, allow_cache_upload = True, ) - libraries[_omnibus_soname(ctx)] = omnibus.linked_object + libraries.append( + create_shlib( + soname = _omnibus_soname(ctx), + lib = omnibus.linked_object, + label = ctx.label, + ), + ) # For all excluded nodes, just add their regular shared libs. for label in spec.excluded: - for name, lib in spec.link_infos[label].shared_libs.items(): - libraries[name] = lib + libraries.extend(spec.link_infos[label].shared_libs.libraries) return OmnibusSharedLibraries( omnibus = omnibus, diff --git a/prelude/cxx/prebuilt_cxx_library_group.bzl b/prelude/cxx/prebuilt_cxx_library_group.bzl index fa51631ef2f..6370818ec54 100644 --- a/prelude/cxx/prebuilt_cxx_library_group.bzl +++ b/prelude/cxx/prebuilt_cxx_library_group.bzl @@ -5,7 +5,11 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -load("@prelude//cxx:cxx_toolchain_types.bzl", "PicBehavior") +load( + "@prelude//cxx:cxx_toolchain_types.bzl", + "LinkerType", + "PicBehavior", +) load( "@prelude//cxx:preprocessor.bzl", "CPreprocessor", @@ -25,7 +29,6 @@ load( "LinkInfo", "LinkInfos", "LinkStrategy", - "Linkage", "LinkedObject", "SharedLibLinkable", "create_merged_link_info", @@ -45,6 +48,8 @@ load( "merge_shared_libraries", ) load("@prelude//linking:strip.bzl", "strip_debug_info") +load("@prelude//linking:types.bzl", "Linkage") +load("@prelude//unix:providers.bzl", "UnixEnv", "create_unix_env_info") load("@prelude//utils:expect.bzl", "expect") load("@prelude//utils:utils.bzl", "flatten_dict") load(":cxx_context.bzl", "get_cxx_toolchain_info") @@ -112,7 +117,7 @@ def _parse_macro(arg: str) -> [(str, str), None]: def _get_static_link_infos( ctx: AnalysisContext, - linker_type: str, + linker_type: LinkerType, libs: list[Artifact], args: list[str]) -> LinkInfos: """ @@ -267,7 +272,7 @@ def prebuilt_cxx_library_group_impl(ctx: AnalysisContext) -> list[Provider]: args.extend(ctx.attrs.exported_preprocessor_flags) for inc_dir in ctx.attrs.include_dirs: args += ["-isystem", inc_dir] - preprocessor = CPreprocessor(relative_args = CPreprocessorArgs(args = args)) + preprocessor = CPreprocessor(args = CPreprocessorArgs(args = args)) inherited_pp_info = cxx_inherited_preprocessor_infos(exported_deps) providers.append(cxx_merge_cpreprocessors(ctx, [preprocessor], inherited_pp_info)) @@ -335,9 +340,10 @@ def prebuilt_cxx_library_group_impl(ctx: AnalysisContext) -> list[Provider]: )) # Propagate shared libraries up the tree. 
+ shared_libs = create_shared_libraries(ctx, solibs) providers.append(merge_shared_libraries( ctx.actions, - create_shared_libraries(ctx, solibs), + shared_libs, filter(None, [x.get(SharedLibraryInfo) for x in deps + exported_deps]), )) @@ -352,7 +358,7 @@ def prebuilt_cxx_library_group_impl(ctx: AnalysisContext) -> list[Provider]: exported_deps = exported_deps, preferred_linkage = preferred_linkage, link_infos = libraries, - shared_libs = solibs, + shared_libs = shared_libs, can_be_asset = getattr(ctx.attrs, "can_be_asset", False) or False, # TODO(cjhopman): this should be set to non-None default_soname = None, @@ -364,4 +370,15 @@ def prebuilt_cxx_library_group_impl(ctx: AnalysisContext) -> list[Provider]: providers.append(merge_link_group_lib_info(deps = deps + exported_deps)) + providers.append( + create_unix_env_info( + actions = ctx.actions, + env = UnixEnv( + label = ctx.label, + native_libs = [shared_libs], + ), + deps = deps + exported_deps, + ), + ) + return providers diff --git a/prelude/cxx/preprocessor.bzl b/prelude/cxx/preprocessor.bzl index 00cd190a67c..570119a6d09 100644 --- a/prelude/cxx/preprocessor.bzl +++ b/prelude/cxx/preprocessor.bzl @@ -6,6 +6,7 @@ # of this source tree. load("@prelude//:paths.bzl", "paths") +load("@prelude//cxx:target_sdk_version.bzl", "get_target_sdk_version_flags") load( "@prelude//utils:utils.bzl", "flatten", @@ -32,7 +33,7 @@ SystemIncludeDirs = record( # Compiler type to infer correct include flags compiler_type = field(str), # Directories to be included via [-isystem | /external:I] [arglike things] - include_dirs = field(list["label_relative_path"]), + include_dirs = field(list[CellPath]), ) CPreprocessorArgs = record( @@ -42,25 +43,33 @@ CPreprocessorArgs = record( file_prefix_args = field(list[typing.Any], []), ) +HeaderUnit = record( + name = field(str), + module = field(Artifact), + include_dir = field(Artifact), + import_include = field(str | None), +) + # Note: Any generic attributes are assumed to be relative. CPreprocessor = record( # Relative path args to be used for build operations. - relative_args = field(CPreprocessorArgs, CPreprocessorArgs()), - # Absolute path args used to generate extra user-specific outputs. - absolute_args = field(CPreprocessorArgs, CPreprocessorArgs()), + args = field(CPreprocessorArgs, CPreprocessorArgs()), # Header specs headers = field(list[CHeader], []), # Those should be mutually exclusive with normal headers as per documentation raw_headers = field(list[Artifact], []), # Directories to be included via -I, [arglike things] - include_dirs = field(list["label_relative_path"], []), + include_dirs = field(list[CellPath], []), # Directories to be included via -isystem, [arglike things] system_include_dirs = field([SystemIncludeDirs, None], None), # Whether to compile with modules support uses_modules = field(bool, False), # Modular args to set when modules are in use, [arglike things] modular_args = field(list[typing.Any], []), - modulemap_path = field(typing.Any, None), + # Path to the modulemap which defines the API exposed to Swift + modulemap_path = field([cmd_args, None], None), + # Header units to load transitively and supporting args. + header_units = field(list[HeaderUnit], []), ) # Methods for transitive_sets must be declared prior to their use. 
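Background for the projection changes in the next hunk: each `_cpreprocessor_*` method folds a whole transitive set of `CPreprocessor` nodes into one flat argument list, visiting shared nodes once. Outside Buck2's tset machinery, that fold looks roughly like this (plain Python; `Node` and `project_args` are invented for illustration):

```
# Toy model of an args projection over a transitive set (hypothetical names).
from dataclasses import dataclass, field

@dataclass
class Node:
    args: list[str]
    children: list["Node"] = field(default_factory=list)

def project_args(root: Node) -> list[str]:
    # Pre-order walk; every node contributes its args exactly once.
    out, stack, seen = [], [root], set()
    while stack:
        node = stack.pop()
        if id(node) in seen:
            continue
        seen.add(id(node))
        out.extend(node.args)
        stack.extend(reversed(node.children))
    return out

leaf = Node(["-I", "leaf/include"])
print(project_args(Node(["-DFOO"], [leaf, Node([], [leaf])])))
# ['-DFOO', '-I', 'leaf/include'] -- the shared leaf is deduplicated
```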
@@ -68,13 +77,7 @@ CPreprocessor = record( def _cpreprocessor_args(pres: list[CPreprocessor]): args = cmd_args() for pre in pres: - args.add(pre.relative_args.args) - return args - -def _cpreprocessor_abs_args(pres: list[CPreprocessor]): - args = cmd_args() - for pre in pres: - args.add(pre.absolute_args.args) + args.add(pre.args.args) return args def _cpreprocessor_modular_args(pres: list[CPreprocessor]): @@ -83,16 +86,21 @@ def _cpreprocessor_modular_args(pres: list[CPreprocessor]): args.add(pre.modular_args) return args -def _cpreprocessor_file_prefix_args(pres: list[CPreprocessor]): +def _cpreprocessor_header_units_args(pres: list[CPreprocessor]): args = cmd_args() for pre in pres: - args.add(pre.relative_args.file_prefix_args) + for h in pre.header_units: + args.add(cmd_args(h.module, format = "-fmodule-file={}={{}}".format(h.name))) + args.add(cmd_args(h.include_dir, format = "-I{}")) + args.add(cmd_args(h.include_dir, format = "-fmodule-map-file={}/module.modulemap")) + if h.import_include: + args.add(["-include", h.import_include]) return args -def _cpreprocessor_abs_file_prefix_args(pres: list[CPreprocessor]): +def _cpreprocessor_file_prefix_args(pres: list[CPreprocessor]): args = cmd_args() for pre in pres: - args.add(pre.absolute_args.file_prefix_args) + args.add(pre.args.file_prefix_args) return args def _cpreprocessor_include_dirs(pres: list[CPreprocessor]): @@ -118,10 +126,9 @@ def _cpreprocessor_uses_modules(children: list[bool], pres: [list[CPreprocessor] # exported pp info and one for not-exported). CPreprocessorTSet = transitive_set( args_projections = { - "abs_args": _cpreprocessor_abs_args, - "abs_file_prefix_args": _cpreprocessor_abs_file_prefix_args, "args": _cpreprocessor_args, "file_prefix_args": _cpreprocessor_file_prefix_args, + "header_units_args": _cpreprocessor_header_units_args, "include_dirs": _cpreprocessor_include_dirs, "modular_args": _cpreprocessor_modular_args, }, @@ -149,15 +156,6 @@ CPreprocessorForTestsInfo = provider( }, ) -# Preprocessor flags -def cxx_attr_preprocessor_flags(ctx: AnalysisContext, ext: str) -> list[typing.Any]: - return ( - ctx.attrs.preprocessor_flags + - cxx_by_language_ext(ctx.attrs.lang_preprocessor_flags, ext) + - flatten(cxx_by_platform(ctx, ctx.attrs.platform_preprocessor_flags)) + - flatten(cxx_by_platform(ctx, cxx_by_language_ext(ctx.attrs.lang_platform_preprocessor_flags, ext))) - ) - def cxx_attr_exported_preprocessor_flags(ctx: AnalysisContext) -> list[typing.Any]: return ( ctx.attrs.exported_preprocessor_flags + @@ -192,7 +190,7 @@ def format_system_include_arg(path: cmd_args, compiler_type: str) -> list[cmd_ar else: return [cmd_args("-isystem"), path] -def cxx_exported_preprocessor_info(ctx: AnalysisContext, headers_layout: CxxHeadersLayout, project_root_file: Artifact, extra_preprocessors: list[CPreprocessor] = []) -> CPreprocessor: +def cxx_exported_preprocessor_info(ctx: AnalysisContext, headers_layout: CxxHeadersLayout, extra_preprocessors: list[CPreprocessor] = []) -> CPreprocessor: """ This rule's preprocessor info which is both applied to the compilation of its source and propagated to the compilation of dependent's sources. 
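The new `_cpreprocessor_header_units_args` projection above expands each `HeaderUnit` into `-fmodule-file=<name>=<path>`, an `-I` for its include dir, a `-fmodule-map-file`, and an optional forced `-include`. A plain-Python model of that mapping (the dataclass is a stand-in for the Starlark record):

```
# Stand-in for the HeaderUnit record; flag spellings mirror the hunk above.
from dataclasses import dataclass
from typing import Optional

@dataclass(frozen=True)
class HeaderUnit:
    name: str
    module: str       # precompiled module artifact
    include_dir: str
    import_include: Optional[str] = None

def header_unit_args(units: list[HeaderUnit]) -> list[str]:
    args: list[str] = []
    for h in units:
        args.append(f"-fmodule-file={h.name}={h.module}")
        args.append(f"-I{h.include_dir}")
        args.append(f"-fmodule-map-file={h.include_dir}/module.modulemap")
        if h.import_include:
            args += ["-include", h.import_include]
    return args

print(header_unit_args([HeaderUnit("core", "out/core.pcm", "out/core")]))
# ['-fmodule-file=core=out/core.pcm', '-Iout/core',
#  '-fmodule-map-file=out/core/module.modulemap']
```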
@@ -237,25 +235,28 @@ def cxx_exported_preprocessor_info(ctx: AnalysisContext, headers_layout: CxxHead include_dirs.extend([ctx.label.path.add(x) for x in ctx.attrs.public_include_directories]) system_include_dirs.extend([ctx.label.path.add(x) for x in ctx.attrs.public_system_include_directories]) - relative_args = _get_exported_preprocessor_args(ctx, exported_header_map, style, compiler_type, raw_headers, extra_preprocessors, None) - absolute_args = _get_exported_preprocessor_args(ctx, exported_header_map, style, compiler_type, raw_headers, extra_preprocessors, project_root_file) + args = _get_exported_preprocessor_args(ctx, exported_header_map, style, compiler_type, raw_headers, extra_preprocessors) modular_args = [] for pre in extra_preprocessors: modular_args.extend(pre.modular_args) + header_units = [] + for pre in extra_preprocessors: + header_units.extend(pre.header_units) + return CPreprocessor( - relative_args = CPreprocessorArgs(args = relative_args.args, file_prefix_args = relative_args.file_prefix_args), - absolute_args = CPreprocessorArgs(args = absolute_args.args, file_prefix_args = absolute_args.file_prefix_args), + args = CPreprocessorArgs(args = args.args, file_prefix_args = args.file_prefix_args), headers = exported_headers, raw_headers = raw_headers, include_dirs = include_dirs, system_include_dirs = SystemIncludeDirs(compiler_type = compiler_type, include_dirs = system_include_dirs), modular_args = modular_args, + header_units = header_units, ) -def _get_exported_preprocessor_args(ctx: AnalysisContext, headers: dict[str, Artifact], style: HeaderStyle, compiler_type: str, raw_headers: list[Artifact], extra_preprocessors: list[CPreprocessor], project_root_file: [Artifact, None]) -> CPreprocessorArgs: - header_root = prepare_headers(ctx, headers, "buck-headers", project_root_file) +def _get_exported_preprocessor_args(ctx: AnalysisContext, headers: dict[str, Artifact], style: HeaderStyle, compiler_type: str, raw_headers: list[Artifact], extra_preprocessors: list[CPreprocessor]) -> CPreprocessorArgs: + header_root = prepare_headers(ctx, headers, "buck-headers") # Process args to handle the `$(cxx-header-tree)` macro. args = [] @@ -278,23 +279,22 @@ def _get_exported_preprocessor_args(ctx: AnalysisContext, headers: dict[str, Art if raw_headers: # NOTE(agallagher): It's a bit weird adding an "empty" arg, but this # appears to do the job (and not e.g. expand to `""`). 
- args.append(cmd_args().hidden(raw_headers)) + args.append(cmd_args(hidden = raw_headers)) # Append any extra preprocessor info passed in via the constructor params for pre in extra_preprocessors: - args.extend(pre.absolute_args.args if project_root_file else pre.relative_args.args) + args.extend(pre.args.args) return CPreprocessorArgs(args = args, file_prefix_args = file_prefix_args) def cxx_private_preprocessor_info( ctx: AnalysisContext, headers_layout: CxxHeadersLayout, - project_root_file: [Artifact, None], raw_headers: list[Artifact] = [], extra_preprocessors: list[CPreprocessor] = [], non_exported_deps: list[Dependency] = [], is_test: bool = False) -> (CPreprocessor, list[CPreprocessor]): - private_preprocessor = _cxx_private_preprocessor_info(ctx, headers_layout, raw_headers, extra_preprocessors, project_root_file) + private_preprocessor = _cxx_private_preprocessor_info(ctx, headers_layout, raw_headers, extra_preprocessors) test_preprocessors = [] if is_test: @@ -309,8 +309,7 @@ def _cxx_private_preprocessor_info( ctx: AnalysisContext, headers_layout: CxxHeadersLayout, raw_headers: list[Artifact], - extra_preprocessors: list[CPreprocessor], - project_root_file: [Artifact, None]) -> CPreprocessor: + extra_preprocessors: list[CPreprocessor]) -> CPreprocessor: """ This rule's preprocessor info which is only applied to the compilation of its source, and not propagated to dependents. @@ -352,23 +351,21 @@ def _cxx_private_preprocessor_info( all_raw_headers.extend(raw_headers) include_dirs.extend([ctx.label.path.add(x) for x in ctx.attrs.include_directories]) - relative_args = _get_private_preprocessor_args(ctx, header_map, compiler_type, all_raw_headers, None) - absolute_args = _get_private_preprocessor_args(ctx, header_map, compiler_type, all_raw_headers, project_root_file) + args = _get_private_preprocessor_args(ctx, header_map, compiler_type, all_raw_headers) return CPreprocessor( - relative_args = CPreprocessorArgs(args = relative_args.args, file_prefix_args = relative_args.file_prefix_args), - absolute_args = CPreprocessorArgs(args = absolute_args.args, file_prefix_args = absolute_args.file_prefix_args), + args = CPreprocessorArgs(args = args.args, file_prefix_args = args.file_prefix_args), headers = headers, raw_headers = all_raw_headers, include_dirs = include_dirs, uses_modules = uses_modules, ) -def _get_private_preprocessor_args(ctx: AnalysisContext, headers: dict[str, Artifact], compiler_type: str, all_raw_headers: list[Artifact], project_root_file: [Artifact, None]) -> CPreprocessorArgs: +def _get_private_preprocessor_args(ctx: AnalysisContext, headers: dict[str, Artifact], compiler_type: str, all_raw_headers: list[Artifact]) -> CPreprocessorArgs: # Create private header tree and propagate via args. - args = [] + args = get_target_sdk_version_flags(ctx) file_prefix_args = [] - header_root = prepare_headers(ctx, headers, "buck-private-headers", project_root_file) + header_root = prepare_headers(ctx, headers, "buck-private-headers") if header_root != None: args.extend(_format_include_arg("-I", header_root.include_path, compiler_type)) if header_root.file_prefix_args != None: @@ -379,7 +376,7 @@ def _get_private_preprocessor_args(ctx: AnalysisContext, headers: dict[str, Arti if all_raw_headers: # NOTE(agallagher): It's a bit weird adding an "empty" arg, but this # appears to do the job (and not e.g. expand to `""`). 
- args.append(cmd_args().hidden(all_raw_headers)) + args.append(cmd_args(hidden = all_raw_headers)) return CPreprocessorArgs(args = args, file_prefix_args = file_prefix_args) diff --git a/prelude/cxx/shared_library_interface.bzl b/prelude/cxx/shared_library_interface.bzl index 3ac819b044c..3b7c58a0fc6 100644 --- a/prelude/cxx/shared_library_interface.bzl +++ b/prelude/cxx/shared_library_interface.bzl @@ -5,9 +5,21 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +load("@prelude//:artifact_tset.bzl", "ArtifactTSet", "make_artifact_tset", "project_artifacts") load("@prelude//:paths.bzl", "paths") +load("@prelude//cxx:preprocessor.bzl", "CPreprocessor", "CPreprocessorInfo") +load("@prelude//cxx:target_sdk_version.bzl", "get_target_triple") +load("@prelude//utils:arglike.bzl", "ArgLike") # @unused Used as a type +load("@prelude//utils:lazy.bzl", "lazy") load(":cxx_context.bzl", "get_cxx_toolchain_info") load(":cxx_toolchain_types.bzl", "CxxToolchainInfo") +load(":headers.bzl", "CHeader") + +# The transitive artifacts of partial shared interface for a library. +# These need to be collected and merged to produce the final shared interface. +SharedInterfaceInfo = provider(fields = { + "interfaces": provider_field(ArtifactTSet), +}) def _shared_library_interface( ctx: AnalysisContext, @@ -80,3 +92,136 @@ def shared_library_interface( shared_lib = shared_lib, identifier = shared_lib.short_path, ) + +def generate_exported_symbols(ctx: AnalysisContext, exported_headers: list[CHeader], exported_preprocessor: CPreprocessor, transitive_preprocessor: list[CPreprocessorInfo]) -> Artifact: + # Use the c++ compiler to correctly generate c++ symbols. + compiler_info = get_cxx_toolchain_info(ctx).cxx_compiler_info + + # Collect the exported headers for this library and create a filelist for them. + # The exported headers are possibly hidden behind a modulemap, + # so cannot be fetched directly from exported_preprocessor. 
+ filelist_headers = [] + for h in exported_headers: + filelist_headers.append({ + "path": h.artifact, + "type": "public", + }) + + # We need to collect all raw_headers that belong in a public include dir + include_dirs = ctx.attrs.public_include_directories + ctx.attrs.public_system_include_directories + include_dirs = [d if d.endswith("/") else d + "/" for d in include_dirs] + if len(include_dirs) > 0: + filelist_headers.extend([ + { + "path": h, + "type": "public", + } + for h in exported_preprocessor.raw_headers + if lazy.is_any(lambda d: h.short_path.startswith(d), include_dirs) + ]) + + filelist_contents = { + "headers": filelist_headers, + "version": "2", + } + filelist = ctx.actions.write_json( + paths.join("__tbd__", ctx.attrs.name + "_exported_headers.json"), + filelist_contents, + with_inputs = True, + ) + + # Run the shlib interface tool with the filelist and required args + output_file = ctx.actions.declare_output( + paths.join("__tbd__", ctx.attrs.name + ".exported_symbols.txt"), + ) + args = cmd_args(get_cxx_toolchain_info(ctx).linker_info.mk_shlib_intf[RunInfo]) + args.add([ + "installapi", + "--filelist", + filelist, + "-o", + output_file.as_output(), + "--target", + get_target_triple(ctx), + ]) + args.add(cmd_args(compiler_info.preprocessor_flags, prepend = "-Xparser")) + args.add(cmd_args(compiler_info.compiler_flags, prepend = "-Xparser")) + args.add(cmd_args(exported_preprocessor.args.args, prepend = "-Xparser")) + for ppinfo in transitive_preprocessor: + args.add(cmd_args(ppinfo.set.project_as_args("args"), prepend = "-Xparser")) + args.add(cmd_args(ppinfo.set.project_as_args("include_dirs"), prepend = "-Xparser")) + + # We need the targets compiler flags to pick up base flags that are applied + # in the macros instead of the toolchain for historical reasons. + args.add(cmd_args(ctx.attrs.compiler_flags, prepend = "-Xparser")) + + ctx.actions.run( + args, + category = "exported_symbols", + identifier = ctx.attrs.name, + ) + + return output_file + +def generate_tbd_with_symbols(ctx: AnalysisContext, soname: str, exported_symbol_inputs: ArtifactTSet, links: list[ArgLike]) -> Artifact: + # Use arglists for the inputs, otherwise we will overflow ARGMAX + symbol_args = project_artifacts(ctx.actions, [exported_symbol_inputs]) + input_argfile, _ = ctx.actions.write("__tbd__/" + ctx.attrs.name + ".symbols.filelist", symbol_args, allow_args = True) + + # Run the shlib interface tool with the merge command + tbd_file = ctx.actions.declare_output( + paths.join("__tbd__", ctx.attrs.name + ".merged.tbd"), + ) + args = cmd_args( + get_cxx_toolchain_info(ctx).linker_info.mk_shlib_intf[RunInfo], + "merge", + "-install_name", + "@rpath/" + soname, + "--symbols-filelist", + input_argfile, + "--target", + get_target_triple(ctx), + "-o", + tbd_file.as_output(), + hidden = symbol_args, + ) + + # Pass through the linker args as we need to honour any flags + # related to exported or unexported symbols. 
+ for link_args in links: + args.add(cmd_args(link_args, prepend = "-Xparser")) + + ctx.actions.run( + args, + category = "generate_tbd", + identifier = ctx.attrs.name, + ) + return tbd_file + +def create_shared_interface_info(ctx: AnalysisContext, symbol_artifacts: list[Artifact], deps: list[Dependency]) -> [SharedInterfaceInfo, None]: + children = [d[SharedInterfaceInfo].interfaces for d in deps if SharedInterfaceInfo in d] + if len(symbol_artifacts) == 0 and len(children) == 0: + return None + + return SharedInterfaceInfo( + interfaces = make_artifact_tset( + actions = ctx.actions, + label = ctx.label, + artifacts = symbol_artifacts, + children = children, + ), + ) + +def create_shared_interface_info_with_children(ctx: AnalysisContext, symbol_artifacts: list[Artifact], children: list[SharedInterfaceInfo]) -> [SharedInterfaceInfo, None]: + children = [d.interfaces for d in children] + if len(symbol_artifacts) == 0 and len(children) == 0: + return None + + return SharedInterfaceInfo( + interfaces = make_artifact_tset( + actions = ctx.actions, + label = ctx.label, + artifacts = symbol_artifacts, + children = children, + ), + ) diff --git a/prelude/cxx/symbols.bzl b/prelude/cxx/symbols.bzl index d423b4450cd..dbd8ca84c9d 100644 --- a/prelude/cxx/symbols.bzl +++ b/prelude/cxx/symbols.bzl @@ -6,7 +6,11 @@ # of this source tree. load("@prelude//:paths.bzl", "paths") -load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo") +load( + "@prelude//cxx:cxx_toolchain_types.bzl", + "CxxToolchainInfo", + "LinkerType", +) load("@prelude//cxx:cxx_utility.bzl", "cxx_attrs_get_allow_cache_upload") load("@prelude//os_lookup:defs.bzl", "OsLookup") @@ -17,6 +21,7 @@ def _extract_symbol_names( objects: list[Artifact], category: str, identifier: [str, None] = None, + defined_only: bool = False, undefined_only: bool = False, dynamic: bool = False, prefer_local: bool = False, @@ -31,6 +36,9 @@ def _extract_symbol_names( if not objects: fail("no objects provided") + if defined_only and undefined_only: + fail("only one of defined_only and undefined_only should be True") + nm = cxx_toolchain.binary_utilities_info.nm output = ctx.actions.declare_output(paths.join("__symbols__", name)) @@ -44,9 +52,13 @@ def _extract_symbol_names( nm_flags += "u" # darwin objects don't have dynamic symbol tables. - if dynamic and cxx_toolchain.linker_info.type != "darwin": + if dynamic and cxx_toolchain.linker_info.type != LinkerType("darwin"): nm_flags += "D" + # llvm-nm supports -U for this but gnu nm doesn't. + if defined_only: + nm_flags += " --defined-only" + is_windows = hasattr(ctx.attrs, "_exec_os_type") and ctx.attrs._exec_os_type[OsLookup].platform == "windows" if is_windows: @@ -56,8 +68,14 @@ def _extract_symbol_names( $lines = $result -split '`n' $lines = $lines | ForEach-Object {{ ($_ -split ' ')[1] }} $lines = $lines | ForEach-Object {{ ($_ -split '@')[0] }} + $lines = $lines | Where-Object {{ $_ -notmatch '__odr_asan_gen_.*' }} $lines = $lines | Sort-Object -Unique - [IO.File]::WriteAllLines('{{}}', $lines) + # Avoid a trailing newline for empty symbol lists + if ($lines.count -eq 0) {{ + [IO.File]::WriteAllText('{{}}', $lines) + }} else {{ + [IO.File]::WriteAllLines('{{}}', $lines) + }} }}""".format(nm_flags) ) symbol_extraction_args = [ @@ -73,6 +91,18 @@ def _extract_symbol_names( ' | cut -d" " -f2 ' + # Strip off ABI Version (@...) when using llvm-nm to keep compat with buck1 " | cut -d@ -f1 " + + # Remove ASAN ODR generated symbols: __odr_asan_gen_*. 
They are + # handled by a separate asan_dynamic_list.txt list of asan patterns. + # BUT MORE IMPORTANTLY, symbols like __odr_asan_XXX[abi:cxx11] force + # lld into a code path that repeatedly does a linear scan of all + # symbols for O(num_patterns_with_bracket * num_symbols) (because of + # the [] being treated as a glob pattern). This totally tanks link + # time for builds with sanitizers! Anecdotally, a binary with 3.7M + # symbols and 2K __odr_asan_XXX[abi:cxx11] can spend 6 mins + # processing patterns and 10s actually linking. We use sed instead + # of grep -v here to avoid an error exit code when there's no input + # symbols, which is not an error for us. + ' | sed "/__odr_asan_gen_.*/d"' + # Sort and dedup symbols. Use the `C` locale and do it in-memory to # make it significantly faster. CAUTION: if ten of these processes # run in parallel, they'll have cumulative allocations larger than RAM. @@ -177,6 +207,29 @@ def extract_symbol_names( **kwargs ) +def extract_defined_syms( + ctx: AnalysisContext, + cxx_toolchain: CxxToolchainInfo, + output: Artifact, + category_prefix: str, + prefer_local: bool = False, + anonymous: bool = False, + allow_cache_upload: bool = False) -> Artifact: + return extract_symbol_names( + ctx = ctx, + cxx_toolchain = cxx_toolchain, + name = output.short_path + ".defined_syms.txt", + objects = [output], + dynamic = True, + global_only = True, + defined_only = True, + category = "{}_defined_syms".format(category_prefix), + identifier = output.short_path, + prefer_local = prefer_local, + anonymous = anonymous, + allow_cache_upload = allow_cache_upload, + ) + def extract_undefined_syms( ctx: AnalysisContext, cxx_toolchain: CxxToolchainInfo, @@ -237,7 +290,7 @@ def _create_symbols_file_from_script( """ all_symbol_files = actions.write(name + ".symbols", symbol_files) - all_symbol_files = cmd_args(all_symbol_files).hidden(symbol_files) + all_symbol_files = cmd_args(all_symbol_files, hidden = symbol_files) output = actions.declare_output(name) cmd = [ "/usr/bin/env", @@ -265,7 +318,7 @@ def get_undefined_symbols_args( category: [str, None] = None, identifier: [str, None] = None, prefer_local: bool = False) -> cmd_args: - if cxx_toolchain.linker_info.type == "gnu": + if cxx_toolchain.linker_info.type == LinkerType("gnu"): # linker script is only supported in gnu linkers linker_script = create_undefined_symbols_linker_script( ctx.actions, diff --git a/prelude/cxx/target_sdk_version.bzl b/prelude/cxx/target_sdk_version.bzl new file mode 100644 index 00000000000..df8c97ba445 --- /dev/null +++ b/prelude/cxx/target_sdk_version.bzl @@ -0,0 +1,102 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load("@prelude//cxx:cxx_context.bzl", "get_cxx_platform_info", "get_cxx_toolchain_info") + +def _version_is_greater(left: str, right: str) -> bool: + # Assumes version strings are in dotted format 1.2.4. + # After comparing components the longer remainder is + # considered larger. 
+ left_components = left.split(".") + right_components = right.split(".") + for pair in zip(left_components, right_components): + x = int(pair[0]) + y = int(pair[1]) + if x < y: + return False + elif x > y: + return True + + return len(left_components) > len(right_components) + +def get_toolchain_target_sdk_version(ctx: AnalysisContext) -> [None, str]: + min_version = ctx.attrs.min_sdk_version + target_version = ctx.attrs.target_sdk_version + if min_version == None and target_version == None: + return None + elif min_version != None and target_version == None: + return min_version + elif min_version == None and target_version != None: + fail("Cannot set target_sdk_version without min_sdk_version") + elif _version_is_greater(min_version, target_version): + warning("Target SDK version {} is less than minimum supported version {}".format(target_version, min_version)) + return min_version + else: + return target_version + +def get_target_sdk_version(ctx: AnalysisContext) -> [None, str]: + toolchain_target_sdk_version = get_cxx_toolchain_info(ctx).target_sdk_version + target_sdk_version = getattr(ctx.attrs, "target_sdk_version", None) + if toolchain_target_sdk_version == None and target_sdk_version == None: + return None + elif toolchain_target_sdk_version != None and target_sdk_version == None: + return toolchain_target_sdk_version + elif toolchain_target_sdk_version == None and target_sdk_version != None: + return target_sdk_version + elif _version_is_greater(target_sdk_version, toolchain_target_sdk_version): + # The requested target_sdk_version on the toolchain must be >= + # the version set on the target, which should be the minimum + # allowed for this version to build. + fail("{} has target_sdk_version {}, which is larger than the toolchain target_sdk_version of {}".format( + ctx.label, + target_sdk_version, + toolchain_target_sdk_version, + )) + else: + return toolchain_target_sdk_version + +_PLATFORM_TARGET_TRIPLE_MAP = { + "appletvos": "{architecture}-apple-tvos{version}", + "appletvsimulator": "{architecture}-apple-tvos{version}-simulator", + "iphoneos": "{architecture}-apple-ios{version}", + "iphonesimulator": "{architecture}-apple-ios{version}-simulator", + "maccatalyst": "{architecture}-apple-ios{version}-macabi", + "macosx": "{architecture}-apple-macosx{version}", + "visionos": "{architecture}-apple-xros{version}", + "visionsimulator": "{architecture}-apple-xros{version}-simulator", + "watchos": "{architecture}-apple-watchos{version}", + "watchsimulator": "{architecture}-apple-watchos{version}-simulator", +} + +def _format_target_triple(ctx: AnalysisContext, version: str) -> str: + platform_info = get_cxx_platform_info(ctx) + platform_components = platform_info.name.split("-") + if platform_components[0] not in _PLATFORM_TARGET_TRIPLE_MAP: + fail("missing target triple for {}".format(platform_components[0])) + + triple_format_str = _PLATFORM_TARGET_TRIPLE_MAP[platform_components[0]] + return triple_format_str.format(architecture = platform_components[1], version = version) + +def get_target_triple(ctx: AnalysisContext) -> [None, str]: + target_sdk_version = get_target_sdk_version(ctx) + if target_sdk_version == None: + return None + + return _format_target_triple(ctx, target_sdk_version) + +def get_unversioned_target_triple(ctx: AnalysisContext) -> str: + return _format_target_triple(ctx, "") + +def get_target_sdk_version_flags(ctx: AnalysisContext) -> list[str]: + if not (hasattr(ctx.attrs, "_cxx_toolchain") or hasattr(ctx.attrs, "_apple_toolchain")): + return [] + + 
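The dotted-version comparison that the rest of this file builds on is easy to check in isolation; below is a plain-Python transcription of `_version_is_greater` above — same algorithm, including the longer-remainder tie-break — before `get_target_sdk_version_flags` continues:

```
# Direct transcription of the Starlark _version_is_greater above.
def version_is_greater(left: str, right: str) -> bool:
    left_parts = left.split(".")
    right_parts = right.split(".")
    for x, y in zip(left_parts, right_parts):
        if int(x) < int(y):
            return False
        if int(x) > int(y):
            return True
    # All shared components equal: the longer version wins ("1.2.1" > "1.2").
    return len(left_parts) > len(right_parts)

assert version_is_greater("1.2.4", "1.2")
assert not version_is_greater("1.2", "1.10")  # numeric, not lexicographic
assert not version_is_greater("1.2", "1.2")
```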
target_triple = get_target_triple(ctx) + if target_triple == None: + return [] + + return ["-target", target_triple] diff --git a/prelude/cxx/tools/BUCK.v2 b/prelude/cxx/tools/BUCK.v2 index 774d7171712..8c4f47914b8 100644 --- a/prelude/cxx/tools/BUCK.v2 +++ b/prelude/cxx/tools/BUCK.v2 @@ -1,7 +1,17 @@ -load(":defs.bzl", "cxx_hacks") +load("@prelude//utils:source_listing.bzl", "source_listing") +load(":defs.bzl", "cxx_hacks", "cxx_internal_tools") + +oncall("build_infra") + +source_listing() prelude = native +cxx_internal_tools( + name = "internal_tools", + visibility = ["PUBLIC"], +) + prelude.python_bootstrap_binary( name = "hmap_wrapper.py", main = "hmap_wrapper.py", @@ -14,33 +24,30 @@ prelude.command_alias( ], exe = ":hmap_wrapper.py", labels = ["buck2-only"], - visibility = ["PUBLIC"], ) prelude.python_bootstrap_binary( name = "make_comp_db", main = "make_comp_db.py", - visibility = ["PUBLIC"], ) prelude.python_bootstrap_binary( name = "dep_file_processor", main = "dep_file_processor.py", + visibility = ["PUBLIC"], deps = [ ":dep_file_processors", ], - visibility = ["PUBLIC"], ) prelude.python_bootstrap_library( name = "dep_file_processors", srcs = [ + "dep_file_utils.py", "makefile_to_dep_file.py", "show_headers_to_dep_file.py", "show_includes_to_dep_file.py", - "dep_file_utils.py", ], - visibility = ["PUBLIC"], ) prelude.python_bootstrap_binary( @@ -49,6 +56,29 @@ prelude.python_bootstrap_binary( visibility = ["PUBLIC"], ) +prelude.python_bootstrap_binary( + name = "remap_cwd", + main = "remap_cwd.py", +) + +prelude.python_bootstrap_binary( + name = "simple_ar", + main = "simple_ar.py", + visibility = ["PUBLIC"], +) + +prelude.python_bootstrap_binary( + name = "concatenate_diagnostics", + main = "concatenate_diagnostics.py", + visibility = ["PUBLIC"], +) + +prelude.python_bootstrap_binary( + name = "stderr_to_file", + main = "stderr_to_file.py", + visibility = ["PUBLIC"], +) + # Required to support the $(cxx-header-tree) macro cxx_hacks( name = "cxx_hacks", diff --git a/prelude/cxx/tools/concatenate_diagnostics.py b/prelude/cxx/tools/concatenate_diagnostics.py new file mode 100755 index 00000000000..d30c2cba2a6 --- /dev/null +++ b/prelude/cxx/tools/concatenate_diagnostics.py @@ -0,0 +1,43 @@ +#!/usr/bin/env python3 +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +""" +Usage: concatenate_diagnostics.py --out path/to/output.txt [path/to/input.txt...] 
+""" + +import argparse +from pathlib import Path +from typing import List, NamedTuple + + +class Args(NamedTuple): + out: Path + subtarget_diagnostics: List[Path] + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("--out", type=Path, required=True) + parser.add_argument("subtarget_diagnostics", nargs="*", type=Path) + args = Args(**vars(parser.parse_args())) + + needs_blank_line = False + with open(args.out, "wb") as out: + for f in args.subtarget_diagnostics: + with open(f, "rb") as f: + content = f.read() + if len(content) == 0: + continue + if needs_blank_line: + out.write(b"\n") + out.write(content) + needs_blank_line = True + + +if __name__ == "__main__": + main() diff --git a/prelude/cxx/tools/defs.bzl b/prelude/cxx/tools/defs.bzl index fd4d9c41934..aac498141d9 100644 --- a/prelude/cxx/tools/defs.bzl +++ b/prelude/cxx/tools/defs.bzl @@ -5,6 +5,35 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxInternalTools", "DistLtoToolsInfo") + +def _cxx_internal_tools_impl(ctx: AnalysisContext) -> list[Provider]: + return [ + DefaultInfo(), + CxxInternalTools( + concatenate_diagnostics = ctx.attrs.concatenate_diagnostics[RunInfo], + dep_file_processor = ctx.attrs.dep_file_processor[RunInfo], + dist_lto = ctx.attrs.dist_lto[DistLtoToolsInfo], + hmap_wrapper = ctx.attrs.hmap_wrapper[RunInfo], + make_comp_db = ctx.attrs.make_comp_db[RunInfo], + remap_cwd = ctx.attrs.remap_cwd[RunInfo], + stderr_to_file = ctx.attrs.stderr_to_file[RunInfo], + ), + ] + +cxx_internal_tools = rule( + impl = _cxx_internal_tools_impl, + attrs = { + "concatenate_diagnostics": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//cxx/tools:concatenate_diagnostics")), + "dep_file_processor": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//cxx/tools:dep_file_processor")), + "dist_lto": attrs.default_only(attrs.dep(providers = [DistLtoToolsInfo], default = "prelude//cxx/dist_lto/tools:dist_lto_tools")), + "hmap_wrapper": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//cxx/tools:hmap_wrapper")), + "make_comp_db": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//cxx/tools:make_comp_db")), + "remap_cwd": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//cxx/tools:remap_cwd")), + "stderr_to_file": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//cxx/tools:stderr_to_file")), + }, +) + def _cxx_hacks_impl(_ctx): return [DefaultInfo(), TemplatePlaceholderInfo( unkeyed_variables = { diff --git a/prelude/cxx/tools/hmap_wrapper.py b/prelude/cxx/tools/hmap_wrapper.py index e2c08254c28..3e04a7da0e7 100755 --- a/prelude/cxx/tools/hmap_wrapper.py +++ b/prelude/cxx/tools/hmap_wrapper.py @@ -30,16 +30,9 @@ def main(argv): if len(mapping_args) % 2 != 0: parser.error("mappings must be dest-source pairs") - project_root = None - if args.project_root_file: - with open(args.project_root_file) as file: - project_root = file.read().strip() - # Convert the hmap mappings passed on the command line to a dict. mappings = {} for src, dst in itertools.zip_longest(*([iter(mapping_args)] * 2)): - if project_root: - dst = f"{project_root}/{dst}" mappings[src] = dst # NOTE(agallagher): Add a mapping from the mapped path to itself. 
If diff --git a/prelude/cxx/tools/make_comp_db.py b/prelude/cxx/tools/make_comp_db.py index 7c41b93ef45..fa31b5f17b9 100755 --- a/prelude/cxx/tools/make_comp_db.py +++ b/prelude/cxx/tools/make_comp_db.py @@ -20,9 +20,29 @@ import json import shlex import sys +from typing import List -def gen(args): +def process_arguments(arguments: List[str]) -> List[str]: + """ + Process arguments to expand argsfiles. + """ + + combined_arguments = [] + for arg in arguments: + if arg.startswith("@"): + with open(arg[1:]) as argsfile: + # The argsfile's arguments are separated by newlines; we + # don't want those included in the argument list. + lines = [" ".join(shlex.split(line)) for line in argsfile.readlines()] + # Support nested argsfiles. + combined_arguments.extend(process_arguments(lines)) + else: + combined_arguments.append(arg) + return combined_arguments + + +def gen(args: argparse.Namespace) -> None: """ Generate a single compilation command in JSON form. """ @@ -30,24 +50,13 @@ def gen(args): entry = {} entry["file"] = args.directory + "/" + args.filename entry["directory"] = "." - - arguments = [] - for arg in args.arguments: - if arg.startswith("@"): - with open(arg[1:]) as argsfile: - for line in argsfile: - # The argsfile's arguments are separated by newlines; we - # don't want those included in the argument list. - arguments.append(" ".join(shlex.split(line))) - else: - arguments.append(arg) - entry["arguments"] = arguments + entry["arguments"] = process_arguments(args.arguments) json.dump(entry, args.output, indent=2) args.output.close() -def merge(args): +def merge(args: argparse.Namespace) -> None: """ Merge multiple compilation DB commands into a single DB. """ @@ -69,7 +78,7 @@ def merge(args): args.output.close() -def main(argv): +def main(argv: List[str]) -> int: parser = argparse.ArgumentParser() subparsers = parser.add_subparsers() @@ -89,6 +98,7 @@ def main(argv): args = parser.parse_args(argv[1:]) args.func(args) + return 0 sys.exit(main(sys.argv)) diff --git a/prelude/cxx/tools/makefile_to_dep_file.py b/prelude/cxx/tools/makefile_to_dep_file.py index ec173cac329..edf66fc74dc 100755 --- a/prelude/cxx/tools/makefile_to_dep_file.py +++ b/prelude/cxx/tools/makefile_to_dep_file.py @@ -8,7 +8,6 @@ # pyre-unsafe -import os import subprocess import sys @@ -88,7 +87,7 @@ def process_dep_file(args): Expects the src dep file to be the first argument, dst dep file to be the second argument, and the command to follow. """ - ret = subprocess.call(args[2:]) + ret = subprocess.call(args[2:], stdin=subprocess.DEVNULL) if ret == 0: rewrite_dep_file(args[0], args[1]) sys.exit(ret) diff --git a/prelude/cxx/tools/remap_cwd.py b/prelude/cxx/tools/remap_cwd.py new file mode 100755 index 00000000000..b2b75689ba7 --- /dev/null +++ b/prelude/cxx/tools/remap_cwd.py @@ -0,0 +1,32 @@ +#!/usr/bin/env python3 +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +""" +Usage: remap_cwd.py path/to/clang++ [args...] 
+ +Runs `path/to/clang++ -ffile-prefix-map=$PWD= [args...]` +""" + +import os +import subprocess +import sys + + +if __name__ == "__main__": + cwd = os.getcwd() + # Add trailing slash + cwd = os.path.join(cwd, "") + + ret = subprocess.call( + [ + sys.argv[1], + f"-ffile-prefix-map={cwd}=", + *sys.argv[2:], + ], + ) + sys.exit(ret) diff --git a/prelude/cxx/tools/show_headers_to_dep_file.py b/prelude/cxx/tools/show_headers_to_dep_file.py index 7b668c46c79..037eb249374 100644 --- a/prelude/cxx/tools/show_headers_to_dep_file.py +++ b/prelude/cxx/tools/show_headers_to_dep_file.py @@ -8,6 +8,7 @@ # pyre-unsafe +import re import sys from subprocess import PIPE, run @@ -22,12 +23,11 @@ # the file itself, so we need the path to add it manually def process_show_headers_dep_file(output_path, cmd_args, input_file): ret = run(cmd_args, stderr=PIPE, encoding="utf-8") - if ret.returncode == 0: - parse_into_dep_file(ret.stderr, output_path, input_file) + parse_into_dep_file(ret.stderr, output_path, input_file, ret.returncode) sys.exit(ret.returncode) -def parse_into_dep_file(output, dst_path, input_file): +def parse_into_dep_file(output, dst_path, input_file, returncode): """ Convert stderr generated by clang to dep file. This will be a mix of output like: @@ -46,17 +46,24 @@ def parse_into_dep_file(output, dst_path, input_file): lines = output.splitlines() - deps = [] - for line in lines: - if line.startswith("."): - path = remove_leading_dots(line.replace(" ", "")) - if len(path) > 0: - deps.append(path.strip()) + if returncode == 0: + deps = [] + for line in lines: + if line.startswith("."): + path = remove_leading_dots(line.replace(" ", "")) + if len(path) > 0: + deps.append(path.strip()) + continue + print(line, file=sys.stderr) # This was a warning/error + + deps.append(input_file) + dep_file_utils.normalize_and_write_deps(deps, dst_path) + else: + for line in lines: + if re.match(r"^\.+ ", line): continue - print(line, file=sys.stderr) # This was a warning/error - deps.append(input_file) - dep_file_utils.normalize_and_write_deps(deps, dst_path) + print(line, file=sys.stderr) def remove_leading_dots(s): diff --git a/prelude/cxx/tools/simple_ar.py b/prelude/cxx/tools/simple_ar.py new file mode 100755 index 00000000000..c6ae5a75361 --- /dev/null +++ b/prelude/cxx/tools/simple_ar.py @@ -0,0 +1,139 @@ +#!/usr/bin/env python3 +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +""" +For certain platform and linker versions (e.g. LLD 15+ for ELF), the +linker ignores the archive symbol table and accesses archive members +directly. When combined with thin archives, this produces trivial +archive files just embedding object paths, but ar (both GNU and LLVM) +still requires access to the object files to produce the archive, +resulting in unnecessary downloads and RE traffic. This implementation +instead embeds the paths directly without needing the actual files +present. The trade-offs are: +- Any problems in the object files will be detected at link time instead + of archive creation time. This should be very rare though. +- Since we can't access the object files, we store their sizes as zero + instead of the actual file size in the archive member headers. LLD for + ELF handles this correctly but I can't speak to other linkers. 
+""" + +import argparse +import os.path +import typing as t +from pathlib import Path + + +class ThinArchive: + MAGIC = b"!\n" + + def __init__(self, inputs: t.Sequence[Path], output: Path) -> None: + self._output = output + # llvm-ar always uses the long name member, and we follow suit for simplicity. + self._create_name_data(inputs, output.parent) + + def write(self) -> None: + with self._output.open("wb") as archive: + archive.write(self.MAGIC) + self._write_member_header( + archive, + name="//", + mtime="", + owner_id="", + group_id="", + mode="", + size=len(self._name_data), + ) + archive.write(self._name_data) + + for offset in self._name_offsets: + self._write_member_header( + archive, + name=f"/{offset}", + mtime="0", + owner_id="0", + group_id="0", + mode="644", + size=0, # as discussed in the file docblock + ) + + def _create_name_data(self, inputs: t.Sequence[Path], output_dir: Path) -> None: + self._name_data = bytearray() + self._name_offsets = [] + for input_path in inputs: + # Paths are stored relative to the archive. We use os.path.relpath instead + # of Pathlib.relative_to because the latter requires a common root. We use + # forward slashes everywhere for consistency and to mimic llvm-ar. + relative_path = Path(os.path.relpath(input_path, output_dir)).as_posix() + encoded = (relative_path + "/\n").encode() # add terminator + self._name_offsets.append(len(self._name_data)) + self._name_data.extend(encoded) + + if len(self._name_data) % 2 != 0: + self._name_data.extend(b"\n") # pad to an even size + + def _write_member_header( + self, + archive: t.BinaryIO, + *, + name: str, + mtime: str, + owner_id: str, + group_id: str, + mode: str, + size: int, + ) -> None: + # https://en.wikipedia.org/wiki/Ar_(Unix)#File_header + archive.write(self._encode_header_field(name, 16)) + archive.write(self._encode_header_field(mtime, 12)) + archive.write(self._encode_header_field(owner_id, 6)) + archive.write(self._encode_header_field(group_id, 6)) + archive.write(self._encode_header_field(mode, 8)) + archive.write(self._encode_header_field(str(size), 10)) + archive.write(b"`\n") # ending characters + + def _encode_header_field(self, value: str, length: int) -> bytes: + encoded = value.encode() + padding = length - len(encoded) + if padding < 0: + raise ValueError(f"Encoding of {str} is larger than {length} bytes") + + return encoded + b" " * padding + + +def main() -> None: + parser = argparse.ArgumentParser( + description="Simple thin archive creator", fromfile_prefix_chars="@" + ) + parser.add_argument("modifiers", help="Operation and modifiers (limited support)") + parser.add_argument("output", type=Path, help="The output file") + parser.add_argument("inputs", nargs="+", help="The input files") + args = parser.parse_args() + + if args.output.exists(): + raise ValueError("Appending to an existing archive is unsupported") + + thin = False + for modifier in args.modifiers: + if modifier == "s": + raise ValueError("Archive symbol tables are unsupported") + elif modifier == "T": + thin = True + elif modifier not in "qcSD": + raise ValueError(f"Unsupported operation or modifier {modifier}") + + if not thin: + raise ValueError("Only thin archives are supported") + + # Strip any leading or trailing quotes (present in Windows argsfiles) + inputs = [Path(p.lstrip('"').rstrip('"')) for p in args.inputs] + archive = ThinArchive(inputs, args.output) + archive.write() + + +if __name__ == "__main__": + main() diff --git a/prelude/cxx/tools/stderr_to_file.py b/prelude/cxx/tools/stderr_to_file.py new file 
mode 100755 index 00000000000..1e11ee5d026 --- /dev/null +++ b/prelude/cxx/tools/stderr_to_file.py @@ -0,0 +1,102 @@ +#!/usr/bin/env python3 +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +""" +Usage: stderr_to_file.py --out=path/to/output path/to/clang++ [args...] +""" + +import argparse +import asyncio +import signal +import subprocess +import sys +from pathlib import Path +from typing import List, NamedTuple + + +# Exit code of `bash -c 'sleep 100'` when interrupted with Ctrl-C +_INTERRUPTED = 128 + signal.SIGINT.value + + +class Args(NamedTuple): + out: Path + command: List[str] + + +class SubprocessProtocol(asyncio.SubprocessProtocol): + """Write subprocess stderr to both self.out and sys.stderr""" + + def __init__(self, out, exit_future): + self.out = out + self.exit_future = exit_future + self.pipe_closed = False + self.exited = False + + def pipe_data_received(self, fd, data): + if fd == sys.stderr.fileno(): + # Blocking write to file. This is buffered in a Python + # io.BufferedRandom. + self.out.write(data) + # Blocking unbuffered write to stderr. Our writes will be exactly as + # buffered as the subprocess's writes. + sys.stderr.buffer.write(data) + sys.stderr.flush() + + def pipe_connection_lost(self, fd, exc): + if fd == sys.stderr.fileno(): + self.pipe_closed = True + # Either of pipe_connection_lost() or process_exited() can be called + # before the other. Wait until both methods are called. + self._check_for_exit() + + def process_exited(self): + self.exited = True + # Either of pipe_connection_lost() or process_exited() can be called + # before the other. Wait until both methods are called. + self._check_for_exit() + + def _check_for_exit(self): + if self.pipe_closed and self.exited: + try: + self.exit_future.set_result(True) + except asyncio.InvalidStateError: + # Event loop has shut down. + pass + + +async def main(): + parser = argparse.ArgumentParser() + parser.add_argument("--out", type=Path, required=True) + parser.add_argument("command", nargs=argparse.REMAINDER) + args = Args(**vars(parser.parse_args())) + + loop = asyncio.get_running_loop() + exit_future = asyncio.Future(loop=loop) + + with open(args.out, "wb+") as out: + transport, protocol = await loop.subprocess_exec( + lambda: SubprocessProtocol(out, exit_future), + *args.command, + stdin=None, # inherit + stdout=None, # inherit + stderr=subprocess.PIPE, + ) + await exit_future + transport.close() + + returncode = transport.get_returncode() + if returncode is None: + return _INTERRUPTED + else: + return returncode + + +try: + sys.exit(asyncio.run(main())) +except KeyboardInterrupt: + sys.exit(_INTERRUPTED) diff --git a/prelude/cxx/user/cxx_toolchain_override.bzl b/prelude/cxx/user/cxx_toolchain_override.bzl index b7f6111a10a..c86fb1c93c0 100644 --- a/prelude/cxx/user/cxx_toolchain_override.bzl +++ b/prelude/cxx/user/cxx_toolchain_override.bzl @@ -5,11 +5,29 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree.
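The thin archive writer above follows the GNU ar long-name convention: a `//` member stores `/`-terminated, newline-separated paths back to back, each real member is named `/<offset>` into that table, and every member header is 60 bytes of space-padded ASCII ending in a backtick and newline. A minimal standalone sketch of that layout (the object paths are hypothetical, not from the patch):

```python
# Illustrative only: mirrors the name-table layout ThinArchive writes above.
names = ["objs/foo.o", "objs/bar.o"]  # hypothetical inputs, relative to the archive

name_data = bytearray()
offsets = []
for n in names:
    offsets.append(len(name_data))
    name_data.extend((n + "/\n").encode())  # "/" terminator, as in _create_name_data
if len(name_data) % 2 != 0:
    name_data.extend(b"\n")  # pad to an even size

print([f"/{o}" for o in offsets])  # -> ['/0', '/12']: member names into the table

# A 60-byte header packs name/mtime/uid/gid/mode/size as space-padded ASCII:
header = f"{'//':<16}{'':<12}{'':<6}{'':<6}{'':<8}{len(name_data):<10}".encode() + b"`\n"
assert len(header) == 60
```

The even-size padding matters because ar readers align member headers to two-byte boundaries, which is also why the script pads the name table with a trailing newline.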
-load("@prelude//cxx:cxx_toolchain_types.bzl", "AsCompilerInfo", "AsmCompilerInfo", "BinaryUtilitiesInfo", "CCompilerInfo", "CxxCompilerInfo", "CxxObjectFormat", "CxxPlatformInfo", "CxxToolchainInfo", "LinkerInfo", "LinkerType", "PicBehavior", "ShlibInterfacesMode", "StripFlagsInfo", "cxx_toolchain_infos") +load( + "@prelude//cxx:cxx_toolchain_types.bzl", + "AsCompilerInfo", + "AsmCompilerInfo", + "BinaryUtilitiesInfo", + "CCompilerInfo", + "CxxCompilerInfo", + "CxxInternalTools", + "CxxObjectFormat", + "CxxPlatformInfo", + "CxxToolchainInfo", + "LinkerInfo", + "LinkerType", + "PicBehavior", + "ShlibInterfacesMode", + "StripFlagsInfo", + "cxx_toolchain_infos", +) load("@prelude//cxx:cxx_utility.bzl", "cxx_toolchain_allow_cache_upload_args") load("@prelude//cxx:debug.bzl", "SplitDebugMode") load("@prelude//cxx:headers.bzl", "HeaderMode") load("@prelude//cxx:linker.bzl", "is_pdb_generated") +load("@prelude//cxx:target_sdk_version.bzl", "get_toolchain_target_sdk_version") load( "@prelude//linking:link_info.bzl", "LinkStyle", @@ -31,18 +49,16 @@ def _cxx_toolchain_override(ctx): preprocessor = _pick_bin(ctx.attrs.as_compiler, base_as_info.preprocessor), preprocessor_type = base_as_info.preprocessor_type, preprocessor_flags = _pick(ctx.attrs.as_preprocessor_flags, base_as_info.preprocessor_flags), - dep_files_processor = base_as_info.dep_files_processor, ) asm_info = base_toolchain.asm_compiler_info if asm_info != None: asm_info = AsmCompilerInfo( compiler = _pick_bin(ctx.attrs.asm_compiler, asm_info.compiler), - compiler_type = asm_info.compiler_type, + compiler_type = _pick_raw(ctx.attrs.asm_compiler_type, asm_info.compiler_type), compiler_flags = _pick(ctx.attrs.asm_compiler_flags, asm_info.compiler_flags), preprocessor = _pick_bin(ctx.attrs.asm_compiler, asm_info.preprocessor), preprocessor_type = asm_info.preprocessor_type, preprocessor_flags = _pick(ctx.attrs.asm_preprocessor_flags, asm_info.preprocessor_flags), - dep_files_processor = asm_info.dep_files_processor, ) base_c_info = base_toolchain.c_compiler_info c_info = CCompilerInfo( @@ -52,7 +68,6 @@ def _cxx_toolchain_override(ctx): preprocessor = _pick_bin(ctx.attrs.c_compiler, base_c_info.preprocessor), preprocessor_type = base_c_info.preprocessor_type, preprocessor_flags = _pick(ctx.attrs.c_preprocessor_flags, base_c_info.preprocessor_flags), - dep_files_processor = base_c_info.dep_files_processor, allow_cache_upload = _pick_raw(ctx.attrs.c_compiler_allow_cache_upload, base_c_info.allow_cache_upload), ) base_cxx_info = base_toolchain.cxx_compiler_info @@ -63,20 +78,19 @@ def _cxx_toolchain_override(ctx): preprocessor = _pick_bin(ctx.attrs.cxx_compiler, base_cxx_info.preprocessor), preprocessor_type = base_cxx_info.preprocessor_type, preprocessor_flags = _pick(ctx.attrs.cxx_preprocessor_flags, base_cxx_info.preprocessor_flags), - dep_files_processor = base_cxx_info.dep_files_processor, allow_cache_upload = _pick_raw(ctx.attrs.cxx_compiler_allow_cache_upload, base_cxx_info.allow_cache_upload), ) base_linker_info = base_toolchain.linker_info - linker_type = ctx.attrs.linker_type if ctx.attrs.linker_type != None else base_linker_info.type + linker_type = LinkerType(ctx.attrs.linker_type) if ctx.attrs.linker_type != None else base_linker_info.type pdb_expected = is_pdb_generated(linker_type, ctx.attrs.linker_flags) if ctx.attrs.linker_flags != None else base_linker_info.is_pdb_generated - # This handles case when linker type is overriden to non-windows from + # This handles case when linker type is overridden to non-windows from # 
windows but linker flags are inherited. # When it's changed from non-windows to windows but flags are not changed, # we can't inspect base linker flags and disable PDB subtargets. # This shouldn't be a problem because to use windows linker after non-windows # linker flags should be changed as well. - pdb_expected = linker_type == "windows" and pdb_expected + pdb_expected = linker_type == LinkerType("windows") and pdb_expected shlib_interfaces = ShlibInterfacesMode(ctx.attrs.shared_library_interface_mode) if ctx.attrs.shared_library_interface_mode else None sanitizer_runtime_files = flatten([runtime_file[DefaultInfo].default_outputs for runtime_file in ctx.attrs.sanitizer_runtime_files]) if ctx.attrs.sanitizer_runtime_files != None else None linker_info = LinkerInfo( @@ -116,13 +130,13 @@ def _cxx_toolchain_override(ctx): use_archiver_flags = value_or(ctx.attrs.use_archiver_flags, base_linker_info.use_archiver_flags), force_full_hybrid_if_capable = value_or(ctx.attrs.force_full_hybrid_if_capable, base_linker_info.force_full_hybrid_if_capable), is_pdb_generated = pdb_expected, - produce_interface_from_stub_shared_library = value_or(ctx.attrs.produce_interface_from_stub_shared_library, base_linker_info.produce_interface_from_stub_shared_library), ) base_binary_utilities_info = base_toolchain.binary_utilities_info binary_utilities_info = BinaryUtilitiesInfo( nm = _pick_bin(ctx.attrs.nm, base_binary_utilities_info.nm), objcopy = _pick_bin(ctx.attrs.objcopy, base_binary_utilities_info.objcopy), + objdump = _pick_bin(ctx.attrs.objdump, base_binary_utilities_info.objdump), ranlib = _pick_bin(ctx.attrs.ranlib, base_binary_utilities_info.ranlib), strip = _pick_bin(ctx.attrs.strip, base_binary_utilities_info.strip), dwp = base_binary_utilities_info.dwp, @@ -130,15 +144,19 @@ def _cxx_toolchain_override(ctx): ) base_strip_flags_info = base_toolchain.strip_flags_info - strip_flags_info = StripFlagsInfo( - strip_debug_flags = _pick(ctx.attrs.strip_debug_flags, base_strip_flags_info.strip_debug_flags), - strip_non_global_flags = _pick(ctx.attrs.strip_non_global_flags, base_strip_flags_info.strip_non_global_flags), - strip_all_flags = _pick(ctx.attrs.strip_all_flags, base_strip_flags_info.strip_all_flags), - ) + if base_strip_flags_info: + strip_flags_info = StripFlagsInfo( + strip_debug_flags = _pick(ctx.attrs.strip_debug_flags, base_strip_flags_info.strip_debug_flags), + strip_non_global_flags = _pick(ctx.attrs.strip_non_global_flags, base_strip_flags_info.strip_non_global_flags), + strip_all_flags = _pick(ctx.attrs.strip_all_flags, base_strip_flags_info.strip_all_flags), + ) + else: + strip_flags_info = None return [ DefaultInfo(), ] + cxx_toolchain_infos( + internal_tools = ctx.attrs._internal_tools[CxxInternalTools], platform_name = ctx.attrs.platform_name if ctx.attrs.platform_name != None else ctx.attrs.base[CxxPlatformInfo].name, platform_deps_aliases = ctx.attrs.platform_deps_aliases if ctx.attrs.platform_deps_aliases != None else [], linker_info = linker_info, @@ -154,40 +172,40 @@ def _cxx_toolchain_override(ctx): hip_compiler_info = base_toolchain.hip_compiler_info, header_mode = HeaderMode(ctx.attrs.header_mode) if ctx.attrs.header_mode != None else base_toolchain.header_mode, headers_as_raw_headers_mode = base_toolchain.headers_as_raw_headers_mode, - mk_comp_db = _pick_bin(ctx.attrs.mk_comp_db, base_toolchain.mk_comp_db), - mk_hmap = _pick_bin(ctx.attrs.mk_hmap, base_toolchain.mk_hmap), - dist_lto_tools_info = base_toolchain.dist_lto_tools_info, use_dep_files = base_toolchain.use_dep_files, 
clang_remarks = base_toolchain.clang_remarks, + gcno_files = base_toolchain.gcno_files, clang_trace = base_toolchain.clang_trace, object_format = CxxObjectFormat(ctx.attrs.object_format) if ctx.attrs.object_format != None else base_toolchain.object_format, conflicting_header_basename_allowlist = base_toolchain.conflicting_header_basename_allowlist, strip_flags_info = strip_flags_info, pic_behavior = PicBehavior(ctx.attrs.pic_behavior) if ctx.attrs.pic_behavior != None else base_toolchain.pic_behavior.value, split_debug_mode = SplitDebugMode(value_or(ctx.attrs.split_debug_mode, base_toolchain.split_debug_mode.value)), + target_sdk_version = value_or(get_toolchain_target_sdk_version(ctx), base_toolchain.target_sdk_version), ) -def _cxx_toolchain_override_inheriting_target_platform_attrs(is_toolchain_rule): - dep_type = attrs.exec_dep if is_toolchain_rule else attrs.dep - base_dep_type = attrs.toolchain_dep if is_toolchain_rule else attrs.dep - return { +cxx_toolchain_override_registration_spec = RuleRegistrationSpec( + name = "cxx_toolchain_override", + impl = _cxx_toolchain_override, + attrs = { "additional_c_compiler_flags": attrs.option(attrs.list(attrs.arg()), default = None), "additional_cxx_compiler_flags": attrs.option(attrs.list(attrs.arg()), default = None), "archive_objects_locally": attrs.option(attrs.bool(), default = None), - "archiver": attrs.option(dep_type(providers = [RunInfo]), default = None), + "archiver": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), "archiver_supports_argfiles": attrs.option(attrs.bool(), default = None), - "as_compiler": attrs.option(dep_type(providers = [RunInfo]), default = None), + "as_compiler": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), "as_compiler_flags": attrs.option(attrs.list(attrs.arg()), default = None), "as_preprocessor_flags": attrs.option(attrs.list(attrs.arg()), default = None), - "asm_compiler": attrs.option(dep_type(providers = [RunInfo]), default = None), + "asm_compiler": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), "asm_compiler_flags": attrs.option(attrs.list(attrs.arg()), default = None), + "asm_compiler_type": attrs.option(attrs.string(), default = None), "asm_preprocessor_flags": attrs.option(attrs.list(attrs.arg()), default = None), - "base": base_dep_type(providers = [CxxToolchainInfo]), + "base": attrs.toolchain_dep(providers = [CxxToolchainInfo]), "bolt_enabled": attrs.option(attrs.bool(), default = None), - "c_compiler": attrs.option(dep_type(providers = [RunInfo]), default = None), + "c_compiler": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), "c_compiler_flags": attrs.option(attrs.list(attrs.arg()), default = None), "c_preprocessor_flags": attrs.option(attrs.list(attrs.arg()), default = None), - "cxx_compiler": attrs.option(dep_type(providers = [RunInfo]), default = None), + "cxx_compiler": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), "cxx_compiler_flags": attrs.option(attrs.list(attrs.arg()), default = None), "cxx_preprocessor_flags": attrs.option(attrs.list(attrs.arg()), default = None), "force_full_hybrid_if_capable": attrs.option(attrs.bool(), default = None), @@ -197,23 +215,23 @@ def _cxx_toolchain_override_inheriting_target_platform_attrs(is_toolchain_rule): "link_libraries_locally": attrs.option(attrs.bool(), default = None), "link_style": attrs.option(attrs.enum(LinkStyle.values()), default = None), "link_weight": attrs.option(attrs.int(), default = None), - "linker": 
attrs.option(dep_type(providers = [RunInfo]), default = None), + "linker": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), "linker_flags": attrs.option(attrs.list(attrs.arg()), default = None), - "linker_type": attrs.option(attrs.enum(LinkerType), default = None), - "llvm_link": attrs.option(dep_type(providers = [RunInfo]), default = None), + "linker_type": attrs.option(attrs.enum(LinkerType.values()), default = None), + "lipo": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), + "llvm_link": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), "lto_mode": attrs.option(attrs.enum(LtoMode.values()), default = None), - "mk_comp_db": attrs.option(dep_type(providers = [RunInfo]), default = None), - "mk_hmap": attrs.option(dep_type(providers = [RunInfo]), default = None), - "mk_shlib_intf": attrs.option(dep_type(providers = [RunInfo]), default = None), - "nm": attrs.option(dep_type(providers = [RunInfo]), default = None), - "objcopy": attrs.option(dep_type(providers = [RunInfo]), default = None), + "min_sdk_version": attrs.option(attrs.string(), default = None), + "mk_shlib_intf": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), + "nm": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), + "objcopy": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), + "objdump": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), "object_format": attrs.enum(CxxObjectFormat.values(), default = "native"), "pic_behavior": attrs.enum(PicBehavior.values(), default = "supported"), "platform_deps_aliases": attrs.option(attrs.list(attrs.string()), default = None), "platform_name": attrs.option(attrs.string(), default = None), "post_linker_flags": attrs.option(attrs.list(attrs.arg()), default = None), - "produce_interface_from_stub_shared_library": attrs.option(attrs.bool(), default = None), - "ranlib": attrs.option(dep_type(providers = [RunInfo]), default = None), + "ranlib": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), "sanitizer_runtime_enabled": attrs.bool(default = False), "sanitizer_runtime_files": attrs.option(attrs.set(attrs.dep(), sorted = True, default = []), default = None), # Use `attrs.dep()` as it's not a tool, always propagate target platform "shared_library_interface_mode": attrs.option(attrs.enum(ShlibInterfacesMode.values()), default = None), @@ -221,22 +239,13 @@ def _cxx_toolchain_override_inheriting_target_platform_attrs(is_toolchain_rule): "shared_library_name_format": attrs.option(attrs.string(), default = None), "shared_library_versioned_name_format": attrs.option(attrs.string(), default = None), "split_debug_mode": attrs.option(attrs.enum(SplitDebugMode.values()), default = None), - "strip": attrs.option(dep_type(providers = [RunInfo]), default = None), + "strip": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), "strip_all_flags": attrs.option(attrs.list(attrs.arg()), default = None), "strip_debug_flags": attrs.option(attrs.list(attrs.arg()), default = None), "strip_non_global_flags": attrs.option(attrs.list(attrs.arg()), default = None), + "target_sdk_version": attrs.option(attrs.string(), default = None), "use_archiver_flags": attrs.option(attrs.bool(), default = None), - } | cxx_toolchain_allow_cache_upload_args() - -cxx_toolchain_override_registration_spec = RuleRegistrationSpec( - name = "cxx_toolchain_override", - impl = _cxx_toolchain_override, - attrs = 
_cxx_toolchain_override_inheriting_target_platform_attrs(is_toolchain_rule = False), -) - -cxx_toolchain_override_inheriting_target_platform_registration_spec = RuleRegistrationSpec( - name = "cxx_toolchain_override_inheriting_target_platform", - impl = _cxx_toolchain_override, - attrs = _cxx_toolchain_override_inheriting_target_platform_attrs(is_toolchain_rule = True), + "_internal_tools": attrs.default_only(attrs.exec_dep(providers = [CxxInternalTools], default = "prelude//cxx/tools:internal_tools")), + } | cxx_toolchain_allow_cache_upload_args(), is_toolchain_rule = True, ) diff --git a/prelude/cxx/user/link_group_map.bzl b/prelude/cxx/user/link_group_map.bzl index 9e7fcd17f28..d3e55c87bb6 100644 --- a/prelude/cxx/user/link_group_map.bzl +++ b/prelude/cxx/user/link_group_map.bzl @@ -7,14 +7,18 @@ load( "@prelude//cxx:groups.bzl", + "get_roots_from_mapping", "make_info_subtarget_providers", "parse_groups_definitions", ) load( "@prelude//cxx:link_groups.bzl", - "LinkGroupInfo", "build_link_group_info", ) +load( + "@prelude//cxx:link_groups_types.bzl", + "link_group_inlined_map_attr", +) load( "@prelude//linking:link_groups.bzl", "LinkGroupLibInfo", @@ -33,78 +37,20 @@ load( "SharedLibraryInfo", ) load("@prelude//user:rule_spec.bzl", "RuleRegistrationSpec") -load("@prelude//decls/common.bzl", "Linkage", "Traversal") - -def _v1_attrs( - optional_root: bool = False, - # Whether we should parse `root` fields as a `dependency`, instead of a `label`. - root_is_dep: bool = True): - if root_is_dep: - attrs_root = attrs.dep(providers = [ - LinkGroupLibInfo, - LinkableGraph, - MergedLinkInfo, - SharedLibraryInfo, - ]) - else: - attrs_root = attrs.label() - - if optional_root: - attrs_root = attrs.option(attrs_root) - - return attrs.list( - attrs.tuple( - # name - attrs.string(), - # list of mappings - attrs.list( - # a single mapping - attrs.tuple( - # root node - attrs_root, - # traversal - attrs.enum(Traversal), - # filters, either `None`, a single filter, or a list of filters - # (which must all match). - attrs.option(attrs.one_of(attrs.list(attrs.string()), attrs.string())), - # linkage - attrs.option(attrs.enum(Linkage)), - ), - ), - # attributes - attrs.option( - attrs.dict(key = attrs.string(), value = attrs.any(), sorted = False), - ), - ), - ) - -def link_group_map_attr(): - v2_attrs = attrs.dep(providers = [LinkGroupInfo]) - return attrs.option( - attrs.one_of( - v2_attrs, - _v1_attrs( - optional_root = True, - # Inlined `link_group_map` will parse roots as `label`s, to avoid - # bloating deps w/ unrelated mappings (e.g. it's common to use - # a default mapping for all rules, which would otherwise add - # unrelated deps to them). - root_is_dep = False, - ), - ), - default = None, - ) +load("@prelude//utils:utils.bzl", "flatten") def _impl(ctx: AnalysisContext) -> list[Provider]: # Extract graphs from the roots via the raw attrs, as `parse_groups_definitions` # parses them as labels. 
+ + deps = flatten([ + get_roots_from_mapping(mapping) + for entry in ctx.attrs.map + for mapping in entry[1] + ]) linkable_graph = create_linkable_graph( ctx, - deps = [ - mapping[0][LinkableGraph] - for entry in ctx.attrs.map - for mapping in entry[1] - ], + deps = [dep[LinkableGraph] for dep in deps], ) link_groups = parse_groups_definitions(ctx.attrs.map, lambda root: root.label) link_group_info = build_link_group_info(linkable_graph, link_groups) @@ -119,6 +65,15 @@ registration_spec = RuleRegistrationSpec( name = "link_group_map", impl = _impl, attrs = { - "map": _v1_attrs(), + "map": link_group_inlined_map_attr( + root_attr = attrs.dep( + providers = [ + LinkGroupLibInfo, + LinkableGraph, + MergedLinkInfo, + SharedLibraryInfo, + ], + ), + ), }, ) diff --git a/prelude/cxx/windows_resource.bzl b/prelude/cxx/windows_resource.bzl index b144c2988cd..17071680dc6 100644 --- a/prelude/cxx/windows_resource.bzl +++ b/prelude/cxx/windows_resource.bzl @@ -6,12 +6,31 @@ # of this source tree. load("@prelude//cxx:cxx_context.bzl", "get_cxx_toolchain_info") +load("@prelude//cxx:headers.bzl", "cxx_get_regular_cxx_headers_layout") +load("@prelude//cxx:preprocessor.bzl", "cxx_merge_cpreprocessors", "cxx_private_preprocessor_info") load("@prelude//linking:link_groups.bzl", "LinkGroupLibInfo") load("@prelude//linking:link_info.bzl", "LibOutputStyle", "LinkInfo", "LinkInfos", "ObjectsLinkable", "create_merged_link_info") load("@prelude//linking:linkable_graph.bzl", "create_linkable_graph") load("@prelude//linking:shared_libraries.bzl", "SharedLibraryInfo") def windows_resource_impl(ctx: AnalysisContext) -> list[Provider]: + (own_non_exported_preprocessor_info, _) = cxx_private_preprocessor_info( + ctx = ctx, + headers_layout = cxx_get_regular_cxx_headers_layout(ctx), + raw_headers = ctx.attrs.raw_headers, + extra_preprocessors = [], + non_exported_deps = [], + is_test = False, + ) + + preprocessor = cxx_merge_cpreprocessors( + ctx, + [own_non_exported_preprocessor_info], + [], + ) + + headers_tag = ctx.actions.artifact_tag() + objects = [] toolchain = get_cxx_toolchain_info(ctx) @@ -20,10 +39,14 @@ def windows_resource_impl(ctx: AnalysisContext) -> list[Provider]: "__objects__", "{}.res".format(src.short_path), ) - rc_cmd = cmd_args(toolchain.rc_compiler_info.compiler) - rc_cmd.add(toolchain.rc_compiler_info.compiler_flags) - rc_cmd.add(cmd_args(rc_output.as_output(), format = "/fo{}")) - rc_cmd.add(src) + rc_cmd = cmd_args( + toolchain.rc_compiler_info.compiler, + toolchain.rc_compiler_info.compiler_flags, + cmd_args(rc_output.as_output(), format = "/fo{}"), + headers_tag.tag_artifacts(preprocessor.set.project_as_args("args")), + headers_tag.tag_artifacts(preprocessor.set.project_as_args("include_dirs")), + src, + ) ctx.actions.run( rc_cmd, @@ -34,10 +57,12 @@ def windows_resource_impl(ctx: AnalysisContext) -> list[Provider]: "__objects__", "{}.obj".format(src.short_path), ) - cvtres_cmd = cmd_args(toolchain.cvtres_compiler_info.compiler) - cvtres_cmd.add(toolchain.cvtres_compiler_info.compiler_flags) - cvtres_cmd.add(cmd_args(cvtres_output.as_output(), format = "/OUT:{}")) - cvtres_cmd.add(rc_output) + cvtres_cmd = cmd_args( + toolchain.cvtres_compiler_info.compiler, + toolchain.cvtres_compiler_info.compiler_flags, + cmd_args(cvtres_output.as_output(), format = "/OUT:{}"), + rc_output, + ) ctx.actions.run( cvtres_cmd, diff --git a/prelude/cxx/xcode.bzl b/prelude/cxx/xcode.bzl index 07c98c91221..5209955f8d2 100644 --- a/prelude/cxx/xcode.bzl +++ b/prelude/cxx/xcode.bzl @@ -10,9 +10,10 @@ load( 
"CompileArgsfile", # @unused Used as a type ) load( - "@prelude//cxx:compile.bzl", + "@prelude//cxx:cxx_sources.bzl", "CxxSrcWithFlags", # @unused Used as a type ) +load("@prelude//ide_integrations:xcode.bzl", "XcodeDataInfoKeys") def cxx_populate_xcode_attributes( ctx, @@ -32,17 +33,17 @@ def cxx_populate_xcode_attributes( converted_srcs[src.file] = file_properties data = { - "argsfiles_by_ext": { + XcodeDataInfoKeys.ARGSFILES_BY_EXT: { ext: argsfile.file for ext, argsfile in argsfiles.items() }, - "headers": _get_artifacts_with_owners(ctx.attrs.headers), - "product_name": product_name, - "srcs": converted_srcs, + XcodeDataInfoKeys.HEADERS: _get_artifacts_with_owners(ctx.attrs.headers), + XcodeDataInfoKeys.PRODUCT_NAME: product_name, + XcodeDataInfoKeys.SRCS: converted_srcs, } if hasattr(ctx.attrs, "exported_headers"): - data["exported_headers"] = _get_artifacts_with_owners(ctx.attrs.exported_headers) + data[XcodeDataInfoKeys.EXPORTED_HEADERS] = _get_artifacts_with_owners(ctx.attrs.exported_headers) return data diff --git a/prelude/debugging/fdb.bxl b/prelude/debugging/fdb.bxl index e19aea22b0e..7be854219f9 100644 --- a/prelude/debugging/fdb.bxl +++ b/prelude/debugging/fdb.bxl @@ -84,7 +84,7 @@ def inspect(ctx: bxl.Context, actions: AnalysisActions, target: bxl.ConfiguredTa # when getting ExecInfo based on external action it's not possible to provide result as ExecInfo # in this case we'll return the artifact which is assumed to have ExecInfo serialized in it - if type(result) == "artifact": + if isinstance(result, Artifact): return result return actions.write_json("out.json", result) diff --git a/prelude/debugging/inspect_dbg_exec.bzl b/prelude/debugging/inspect_dbg_exec.bzl index 416f5b4869a..33dfafb01f6 100644 --- a/prelude/debugging/inspect_dbg_exec.bzl +++ b/prelude/debugging/inspect_dbg_exec.bzl @@ -18,8 +18,7 @@ def inspect_dbg_exec(ctx: bxl.Context, actions: AnalysisActions, target: bxl.Con providers = ctx.analysis(fbsource_alias_target).providers() fdb_helper = providers[RunInfo] fdb_helper_out = actions.declare_output("fdb_helper.json") - cmd = cmd_args(fdb_helper) - cmd.add(settings.args) + cmd = cmd_args(fdb_helper, settings.args) actions.run(cmd, category = "fdb_helper", env = {"FDB_OUTPUT_FILE": fdb_helper_out.as_output()}, local_only = True) result = actions.declare_output("final_out.json") @@ -47,7 +46,7 @@ def inspect_dbg_exec(ctx: bxl.Context, actions: AnalysisActions, target: bxl.Con actions.dynamic_output( dynamic = [fdb_helper_out], inputs = [], - outputs = [result], + outputs = [result.as_output()], f = build_exec_info, ) return result diff --git a/prelude/debugging/labels.bzl b/prelude/debugging/labels.bzl index adc5589dc7b..94f99b10c65 100644 --- a/prelude/debugging/labels.bzl +++ b/prelude/debugging/labels.bzl @@ -12,7 +12,7 @@ # For example: # Running "buck run //another:target" (or via using [RunInfo]) should produce `ExecInfo` as its stdout -# 3. If target has a label `dbg:info:ref=//another:target` we assume a presense of //another:target which we can inspect for the presense of relevant providers (see fdb.bxl) +# 3. 
If target has a label `dbg:info:ref=//another:target` we assume the presence of //another:target which we can inspect for the presence of relevant providers (see fdb.bxl) # This label indicates where to locate "[RunInfo]" which would output `ExecInfo` -compatible output DBG_INFO_EXEC = "dbg:info:exec" @@ -34,13 +34,6 @@ def get_info_ref(labels: list[str]) -> [str, None]: return result return None -def get_info_exec(labels: list[str]) -> [str, None]: - for label in labels: - result = _get_value_by_mark(DBG_INFO_EXEC, label) - if result: - return result - return None - def get_label_or_mark(label: str) -> str: for mark in [DBG_INFO_EXEC, DBG_INFO_REF]: if label.startswith(mark): diff --git a/prelude/debugging/types.bzl b/prelude/debugging/types.bzl index bfc9ea4e701..13149d2e684 100644 --- a/prelude/debugging/types.bzl +++ b/prelude/debugging/types.bzl @@ -17,7 +17,7 @@ load("@prelude//utils:arglike.bzl", "ArgLike") # @unused Used as a type # Even though arguments are available in "bxl_ctx", when using ctx.dynamic_output it's not possible to access arguments from there # One way to work this around is by capturing an object in a closure and this type is used to carry the contract for this object ScriptSettings = record( - target = field("target_node"), + target = field(bxl.ConfiguredTargetNode), args = field(list[ArgLike], default = []), ) @@ -60,7 +60,7 @@ Custom = record( # Java DAP server requires this file in order to correctly locate classes in the source files # The integration with a tool is available as a part of "JVM" rules. (java/kotlin_library/binary/test) JavaInfo = record( - classmap_file = field([Artifact, None]), + classmap_file = field(Artifact | None), ) # Customizations that are understood by debugging tool diff --git a/prelude/decls/android_common.bzl b/prelude/decls/android_common.bzl index eee2f5d1b6b..6766684371e 100644 --- a/prelude/decls/android_common.bzl +++ b/prelude/decls/android_common.bzl @@ -17,14 +17,14 @@ def _manifest_apk_arg(): case is that the manifest will be in the same directory as the rule, in which case this will simply be `'AndroidManifest.xml'`, but it can also reference - an `android\_manifest()`rule. + an `android_manifest()` rule. Prefer using `manifest_skeleton`, which performs merging automatically. Exactly one of `manifest` and `manifest_skeleton` must be set. """), "manifest_skeleton": attrs.option(attrs.source(), default = None, doc = """ Relative path to the skeleton Android manifest for the APK. - An `android\_manifest()`will be created automatically to merge + An `android_manifest()` will be created automatically to merge all manifests from libraries and resources going into the app. The common case is that the manifest will be in the same directory as the rule, in which case this will simply be @@ -47,7 +47,7 @@ def _deps_apk_arg(): def _manifest_arg(): return { "manifest": attrs.option(attrs.source(), default = None, doc = """ - An optional [Android Manifest](http://developer.android.com/guide/topics/manifest/manifest-intro.html) for the to declare any permissions or intents it may need or want to handle. May either be a file or a `android\_manifest()`target. + An optional [Android Manifest](http://developer.android.com/guide/topics/manifest/manifest-intro.html) for the rule to declare any permissions or intents it may need or want to handle. May either be a file or an `android_manifest()` target.
"""), } diff --git a/prelude/decls/android_rules.bzl b/prelude/decls/android_rules.bzl index eeed2d27d2e..eb148632ab5 100644 --- a/prelude/decls/android_rules.bzl +++ b/prelude/decls/android_rules.bzl @@ -10,6 +10,7 @@ # the generated docs, and so those should be verified to be accurate and # well-formatted (and then delete this TODO) +load("@prelude//utils/clear_platform.bzl", "clear_platform_transition") load(":android_common.bzl", "android_common") load(":common.bzl", "AbiGenerationMode", "AnnotationProcessingTool", "ForkMode", "LogLevel", "OnDuplicateEntry", "SourceAbiVerificationMode", "TestType", "UnusedDependenciesAction", "buck", "prelude_rule") load(":core_rules.bzl", "TargetCpuType") @@ -76,10 +77,10 @@ android_aar = prelude_rule( # @unsorted-dict-items { "manifest_skeleton": attrs.source(doc = """ - The skeleton manifest file used to generate the final `AndroidManifest.xml` . May either be a file or a `android\\_manifest()`target. + The skeleton manifest file used to generate the final `AndroidManifest.xml` . May either be a file or an `android_manifest()` target. """), "build_config_values": attrs.list(attrs.string(), default = [], doc = """ - See the documentation on the values argument for `android\\_build\\_config()`. + See the documentation on the values argument for `android_build_config()`. """), "include_build_config_class": attrs.bool(default = False, doc = """ Whether to include the `BuildConfig` class files in the final .aar file. Needs @@ -119,27 +120,23 @@ android_aar = prelude_rule( "excluded_java_deps": attrs.list(attrs.dep(), default = []), "extra_arguments": attrs.list(attrs.string(), default = []), "extra_kotlinc_arguments": attrs.list(attrs.string(), default = []), - "extra_non_source_only_abi_kotlinc_arguments": attrs.list(attrs.string(), default = []), "friend_paths": attrs.list(attrs.dep(), default = []), "java_version": attrs.option(attrs.string(), default = None), - "javac": attrs.option(attrs.source(), default = None), - "kotlin_compiler_plugins": attrs.dict(key = attrs.source(), value = attrs.dict(key = attrs.string(), value = attrs.string(), sorted = False), sorted = False, default = {}), "labels": attrs.list(attrs.string(), default = []), "language": attrs.option(attrs.enum(JvmLanguage), default = None), "licenses": attrs.list(attrs.source(), default = []), "manifest": attrs.option(attrs.source(), default = None), "manifest_file": attrs.option(attrs.source(), default = None), "maven_coords": attrs.option(attrs.string(), default = None), - "native_library_merge_code_generator": attrs.option(attrs.dep(), default = None), + "native_library_merge_code_generator": attrs.option(attrs.exec_dep(), default = None), "native_library_merge_glue": attrs.option(attrs.dep(), default = None), - "native_library_merge_localized_symbols": attrs.option(attrs.set(attrs.string(), sorted = True), default = None), "native_library_merge_map": attrs.option(attrs.dict(key = attrs.string(), value = attrs.list(attrs.regex()), sorted = False), default = None), "native_library_merge_sequence": attrs.option(attrs.list(attrs.any()), default = None), "native_library_merge_sequence_blocklist": attrs.option(attrs.list(attrs.regex()), default = None), "never_mark_as_unused_dependency": attrs.option(attrs.bool(), default = None), "on_unused_dependencies": attrs.option(attrs.enum(UnusedDependenciesAction), default = None), - "plugins": attrs.list(attrs.dep(), default = []), "proguard_config": attrs.option(attrs.source(), default = None), + "relinker_extra_deps": attrs.list(attrs.dep(), 
default = [], doc = "Deps statically linked to every native lib by the relinker."), "relinker_whitelist": attrs.list(attrs.regex(), default = []), "required_for_source_only_abi": attrs.bool(default = False), "resource_union_package": attrs.option(attrs.string(), default = None), @@ -152,9 +149,8 @@ android_aar = prelude_rule( "srcs": attrs.list(attrs.source(), default = []), "target": attrs.option(attrs.string(), default = None), "use_jvm_abi_gen": attrs.option(attrs.bool(), default = None), - "_wip_java_plugin_arguments": attrs.dict(attrs.label(), attrs.list(attrs.string()), default = {}), } - ), + ) | jvm_common.plugins() | jvm_common.javac(), ) android_app_modularity = prelude_rule( @@ -165,7 +161,7 @@ android_app_modularity = prelude_rule( attrs = ( # @unsorted-dict-items { - "application_module_blacklist": attrs.option(attrs.list(attrs.query()), default = None), + "application_module_blacklist": attrs.option(attrs.list(attrs.dep()), default = None), "application_module_configs": attrs.dict(key = attrs.string(), value = attrs.list(attrs.dep()), sorted = False, default = {}), "application_module_dependencies": attrs.option(attrs.dict(key = attrs.string(), value = attrs.list(attrs.string()), sorted = False), default = None), "contacts": attrs.list(attrs.string(), default = []), @@ -196,7 +192,7 @@ android_binary = prelude_rule( "allow_r_dot_java_in_secondary_dex": attrs.bool(default = False), "allowed_duplicate_resource_types": attrs.list(attrs.enum(RType), default = []), "android_sdk_proguard_config": attrs.option(attrs.enum(SdkProguardType), default = None), - "application_module_blacklist": attrs.option(attrs.list(attrs.query()), default = None), + "application_module_blacklist": attrs.option(attrs.list(attrs.dep()), default = None), "application_module_configs": attrs.dict(key = attrs.string(), value = attrs.list(attrs.dep()), sorted = False, default = {}), "application_module_dependencies": attrs.option(attrs.dict(key = attrs.string(), value = attrs.list(attrs.string()), sorted = False), default = None), "asset_compression_algorithm": attrs.option(attrs.enum(CompressionAlgorithm), default = None), @@ -237,7 +233,6 @@ android_binary = prelude_rule( "module_manifest_skeleton": attrs.option(attrs.source(), default = None), "native_library_merge_code_generator": attrs.option(attrs.dep(), default = None), "native_library_merge_glue": attrs.option(attrs.dep(), default = None), - "native_library_merge_localized_symbols": attrs.option(attrs.set(attrs.string(), sorted = True), default = None), "native_library_merge_map": attrs.option(attrs.dict(key = attrs.string(), value = attrs.list(attrs.regex()), sorted = False), default = None), "native_library_merge_sequence": attrs.option(attrs.list(attrs.any()), default = None), "native_library_merge_sequence_blocklist": attrs.option(attrs.list(attrs.regex()), default = None), @@ -249,6 +244,7 @@ android_binary = prelude_rule( "package_asset_libraries": attrs.bool(default = False), "package_type": attrs.enum(PackageType, default = "debug"), "packaged_locales": attrs.list(attrs.string(), default = []), + "packaging_options": attrs.dict(key = attrs.string(), value = attrs.list(attrs.string()), default = {}), "post_filter_resources_cmd": attrs.option(attrs.arg(), default = None), "preprocess_java_classes_bash": attrs.option(attrs.arg(), default = None), "preprocess_java_classes_cmd": attrs.option(attrs.arg(), default = None), @@ -256,6 +252,7 @@ android_binary = prelude_rule( "primary_dex_patterns": attrs.list(attrs.string(), default = []), 
"proguard_config": attrs.option(attrs.source(), default = None), "proguard_jvm_args": attrs.list(attrs.string(), default = []), + "relinker_extra_deps": attrs.list(attrs.dep(), default = [], doc = "Deps statically linked to every native lib by the relinker."), "relinker_whitelist": attrs.list(attrs.regex(), default = []), "resource_compression": attrs.enum(ResourceCompressionMode, default = "disabled"), "resource_filter": attrs.list(attrs.string(), default = []), @@ -277,7 +274,7 @@ android_build_config = prelude_rule( docs = """ An `android_build_config()` rule is used to generate a `BuildConfig` class with global configuration variables - that other `android\\_library()`rules can compile against. + that other `android_library()` rules can compile against. Currently, the only variable exposed by `BuildConfig` is a global `boolean` named `DEBUG`, much like the `BuildConfig.java` generated by the official Android @@ -290,7 +287,7 @@ android_build_config = prelude_rule( be replaced with a new version where: * The fields will be set to literal values (i.e., constant expressions). * The `boolean BuildConfig.DEBUG` field will correspond to - that of the `package_type` argument to the `android\\_binary()`rule + that of the `package_type` argument to the `android_binary()` rule that is packaging it. @@ -301,7 +298,7 @@ android_build_config = prelude_rule( examples = """ Here is an example of an `android_build_config()` rule that is transitively included by both *debug* and *release* versions - of an `android\\_binary()`rule. The value + of an `android_binary()` rule. The value of `com.example.pkg.BuildConfig.DEBUG` will be different in each APK even though they both transitively depend on the same `:build_config` rule. @@ -392,14 +389,14 @@ android_build_config = prelude_rule( generated `BuildConfig.java` file. Like `DEBUG`, the values will be non-constant-expressions that evaluate to the value specified in the file at compilation time. - To override the values in an APK, specify build\\_config\\_values or build\\_config\\_values\\_file in `android\\_binary()`. + To override the values in an APK, specify build\\_config\\_values or build\\_config\\_values\\_file in `android_binary()`. """), "values_file": attrs.option(attrs.source(), default = None, doc = """ Optional path to a file that defines additional fields (and values) that should be declared in the generated `BuildConfig.java` file. Like `DEBUG`, the values will be non-constant-expressions that evaluate to the value specified in the file at compilation time. - To override the values in an APK, specify build\\_config\\_values or build\\_config\\_values\\_file in `android\\_binary()`. + To override the values in an APK, specify build\\_config\\_values or build\\_config\\_values\\_file in `android_binary()`. Note that values\\_file can be a generated file, as can build\\_config\\_values\\_file as demonstrated in the example below. 
@@ -428,7 +425,7 @@ android_bundle = prelude_rule( "allow_r_dot_java_in_secondary_dex": attrs.bool(default = False), "allowed_duplicate_resource_types": attrs.list(attrs.enum(RType), default = []), "android_sdk_proguard_config": attrs.option(attrs.enum(SdkProguardType), default = None), - "application_module_blacklist": attrs.option(attrs.list(attrs.query()), default = None), + "application_module_blacklist": attrs.option(attrs.list(attrs.dep()), default = None), "application_module_configs": attrs.dict(key = attrs.string(), value = attrs.list(attrs.dep()), sorted = False, default = {}), "application_module_dependencies": attrs.option(attrs.dict(key = attrs.string(), value = attrs.list(attrs.string()), sorted = False), default = None), "asset_compression_algorithm": attrs.option(attrs.enum(CompressionAlgorithm), default = None), @@ -470,7 +467,6 @@ android_bundle = prelude_rule( "module_manifest_skeleton": attrs.option(attrs.source(), default = None), "native_library_merge_code_generator": attrs.option(attrs.dep(), default = None), "native_library_merge_glue": attrs.option(attrs.dep(), default = None), - "native_library_merge_localized_symbols": attrs.option(attrs.set(attrs.string(), sorted = True), default = None), "native_library_merge_map": attrs.option(attrs.dict(key = attrs.string(), value = attrs.list(attrs.regex()), sorted = False), default = None), "native_library_merge_sequence": attrs.option(attrs.list(attrs.any()), default = None), "native_library_merge_sequence_blocklist": attrs.option(attrs.list(attrs.regex()), default = None), @@ -482,6 +478,7 @@ android_bundle = prelude_rule( "package_asset_libraries": attrs.bool(default = False), "package_type": attrs.enum(PackageType, default = "debug"), "packaged_locales": attrs.list(attrs.string(), default = []), + "packaging_options": attrs.dict(key = attrs.string(), value = attrs.list(attrs.string()), default = {}), "post_filter_resources_cmd": attrs.option(attrs.arg(), default = None), "preprocess_java_classes_bash": attrs.option(attrs.arg(), default = None), "preprocess_java_classes_cmd": attrs.option(attrs.arg(), default = None), @@ -489,6 +486,7 @@ android_bundle = prelude_rule( "primary_dex_patterns": attrs.list(attrs.string(), default = []), "proguard_config": attrs.option(attrs.source(), default = None), "proguard_jvm_args": attrs.list(attrs.string(), default = []), + "relinker_extra_deps": attrs.list(attrs.dep(), default = [], doc = "Deps statically linked to every native lib by the relinker."), "relinker_whitelist": attrs.list(attrs.regex(), default = []), "resource_compression": attrs.enum(ResourceCompressionMode, default = "disabled"), "resource_filter": attrs.list(attrs.string(), default = []), @@ -522,7 +520,7 @@ android_instrumentation_apk = prelude_rule( when running the test. """, examples = """ - Here is an example of an `android_instrumentation_apk()` rule that tests a `android_binary()`, and depends on a test + Here is an example of an `android_instrumentation_apk()` rule that tests an `android_binary()`, and depends on a test package. @@ -563,8 +561,7 @@ android_instrumentation_apk = prelude_rule( { "apk": attrs.dep(doc = """ APK build target, which should be used for the instrumentation APK. - Can be either a `android\\_binary()`or a - `apk\\_genrule()`. + Can be either an `android_binary()` or an `apk_genrule()`. 
"""), } | android_common.deps_apk_arg() | @@ -579,6 +576,9 @@ android_instrumentation_apk = prelude_rule( "licenses": attrs.list(attrs.source(), default = []), "use_split_dex": attrs.option(attrs.bool(), default = None), "primary_dex_patterns": attrs.list(attrs.string(), default = []), + "preprocess_java_classes_bash": attrs.option(attrs.arg(), default = None), + "preprocess_java_classes_cmd": attrs.option(attrs.arg(), default = None), + "preprocess_java_classes_deps": attrs.list(attrs.dep(), default = []), } ), ) @@ -586,7 +586,7 @@ android_instrumentation_apk = prelude_rule( android_instrumentation_test = prelude_rule( name = "android_instrumentation_test", docs = """ - A `android_instrumentation_test()` rule is used to define + An `android_instrumentation_test()` rule is used to define apks that should be used to run Android instrumentation tests. """, examples = """ @@ -626,19 +626,34 @@ android_instrumentation_test = prelude_rule( further = None, attrs = ( # @unsorted-dict-items + buck.inject_test_env_arg() | { "apk": attrs.dep(doc = """ - The APK containing the tests. Can be an `android\\_binary()`, - an `apk\\_genrule()`or an `android\\_instrumentation\\_apk()`. + The APK containing the tests. Can be an `android_binary()`, + an `apk_genrule()` or an `android_instrumentation_apk()`. """), } | buck.test_label_arg() | buck.test_rule_timeout_ms() | { + "clear_package_data": attrs.bool(default = False, doc = """ + Runs `pm clear` on the app and test packages before the test run if set to True. + """), "contacts": attrs.list(attrs.string(), default = []), "default_host_platform": attrs.option(attrs.configuration_label(), default = None), + "disable_animations": attrs.bool(default = False, doc = """ + Disables animations on the emulator if set to True. + """), + "collect_tombstones": attrs.bool(default = False, doc = """ + Checks whether the test generated any tombstones, and downloads them from the emulator if true. + """), + "record_video": attrs.bool(default = False, doc = "Record video of test run and collect it as TRA"), + "log_extractors": attrs.dict(key = attrs.string(), value = attrs.string(), sorted = False, default = {}), "env": attrs.dict(key = attrs.string(), value = attrs.arg(), sorted = False, default = {}), "licenses": attrs.list(attrs.source(), default = []), + "_android_emulators": attrs.option(attrs.transition_dep(cfg = clear_platform_transition, providers = [LocalResourceInfo]), default = None, doc = """ + If provided, local resource of "android_emulators" type will be required to run this test locally and this target will be used to manage it. If omitted, local resource of "android_emulators" type will be ignored even if requested by the test runner. + """), } ), ) @@ -653,7 +668,7 @@ android_library = prelude_rule( """, examples = """ An `android_library` rule used in concert with an - `android\\_resource()`rule. + `android_resource()` rule. This would be a common arrangement for a standard Android Library project as defined by @@ -711,12 +726,6 @@ android_library = prelude_rule( Overrides the value in "target\\_level" in the "java" section of `.buckconfig`. """), - "javac": attrs.option(attrs.source(), default = None, doc = """ - Specifies the Java compiler program to use for this rule. - The value is a source path (e.g., //foo/bar:bar). - Overrides the value in "javac" in the "tools" section - of `.buckconfig`. - """), "extra_arguments": attrs.list(attrs.string(), default = [], doc = """ List of additional arguments to pass into the Java compiler. 
These arguments follow the ones specified in `.buckconfig`. @@ -740,6 +749,9 @@ android_library = prelude_rule( jvm_common.source_only_abi_deps() | jvm_common.required_for_source_only_abi() | jvm_common.k2() | + jvm_common.kotlin_compiler_plugins() | + jvm_common.incremental() | + jvm_common.javac() | { "remove_classes": attrs.list(attrs.regex(), default = [], doc = """ List of classes to remove from the output jar. It only removes classes from the target's own @@ -750,11 +762,9 @@ android_library = prelude_rule( "annotation_processors": attrs.list(attrs.string(), default = []), "contacts": attrs.list(attrs.string(), default = []), "default_host_platform": attrs.option(attrs.configuration_label(), default = None), - "extra_non_source_only_abi_kotlinc_arguments": attrs.list(attrs.string(), default = []), "friend_paths": attrs.list(attrs.dep(), default = []), "java_version": attrs.option(attrs.string(), default = None), "jar_postprocessor": attrs.option(attrs.exec_dep(), default = None), - "kotlin_compiler_plugins": attrs.dict(key = attrs.source(), value = attrs.dict(key = attrs.string(), value = attrs.string(), sorted = False), sorted = False, default = {}), "labels": attrs.list(attrs.string(), default = []), "language": attrs.option(attrs.enum(JvmLanguage), default = None), "licenses": attrs.list(attrs.source(), default = []), @@ -762,16 +772,14 @@ android_library = prelude_rule( "maven_coords": attrs.option(attrs.string(), default = None), "never_mark_as_unused_dependency": attrs.option(attrs.bool(), default = None), "on_unused_dependencies": attrs.option(attrs.enum(UnusedDependenciesAction), default = None), - "plugins": attrs.list(attrs.dep(), default = []), "proguard_config": attrs.option(attrs.source(), default = None), "resource_union_package": attrs.option(attrs.string(), default = None), "resources_root": attrs.option(attrs.source(), default = None), "runtime_deps": attrs.list(attrs.dep(), default = []), "source_abi_verification_mode": attrs.option(attrs.enum(SourceAbiVerificationMode), default = None), "use_jvm_abi_gen": attrs.option(attrs.bool(), default = None), - "_wip_java_plugin_arguments": attrs.dict(attrs.label(), attrs.list(attrs.string()), default = {}), } - ), + ) | jvm_common.plugins(), ) android_manifest = prelude_rule( @@ -779,7 +787,7 @@ android_manifest = prelude_rule( docs = """ An `android_manifest()` rule is used to generate an [Android - Manifest](http://developer.android.com/guide/topics/manifest/manifest-intro.html) to be used by `android\\_binary()`and `android\\_aar()`rules. This + Manifest](http://developer.android.com/guide/topics/manifest/manifest-intro.html) to be used by `android_binary()` and `android_aar()` rules. This rule takes a skeleton manifest, and merges it with manifests found in any deps. """, examples = """ @@ -823,20 +831,20 @@ android_manifest = prelude_rule( ``` - You could also use a `genrule()`to generate the manifest file and reference the - `build target`in the `skeleton` argument. + You could also use a `genrule()` to generate the manifest file and reference the + `build target` in the `skeleton` argument. """, further = None, attrs = ( # @unsorted-dict-items { "skeleton": attrs.source(doc = """ - Either a `build target`or a path to a file representing the manifest that + Either a `build target` or a path to a file representing the manifest that will be merged with any manifests associated with this rule's `deps`. 
"""), "deps": attrs.list(attrs.dep(), default = [], doc = """ A collection of dependencies that includes android\\_library rules. The manifest files of the - `android\\_library()`rules will be filtered out to become dependent source files for + `android_library()` rules will be filtered out to become dependent source files for the manifest. """), "contacts": attrs.list(attrs.string(), default = []), @@ -866,7 +874,7 @@ android_prebuilt_aar = prelude_rule( docs = """ An `android_prebuilt_aar()` rule takes an `.aar` file and makes it available as an Android dependency. As expected, - an `android\\_binary()`that transitively depends on + an `android_binary()` that transitively depends on an `android_prebuilt_aar()` will include its contents in the generated APK. @@ -1060,23 +1068,34 @@ apk_genrule = prelude_rule( { "apk": attrs.option(attrs.dep(), default = None, doc = """ The input `android_binary()` rule. The path to the APK can be - accessed with the `$APK` shell variable. + accessed with the `$APK` shell variable. Only one of `apk` or + `aab` can be provided. """), + "keystore": attrs.option(attrs.dep(), default = None), } | genrule_common.srcs_arg() | genrule_common.cmd_arg() | genrule_common.bash_arg() | genrule_common.cmd_exe_arg() | genrule_common.type_arg() | + genrule_common.weight_arg() | { "out": attrs.option(attrs.string(), default = None, doc = """ - This argument only exists for historical reasons and it does not have any - effect. It will be deprecated and removed in the future. + The name of the output file or directory. The complete path to this + argument is provided to the shell command through + the `OUT` environment variable. Only one of `out` + or `outs` may be present. + + For an apk_genrule the output should be a '.apk' or '.aab' file. """), } | genrule_common.environment_expansion_separator() | { - "aab": attrs.option(attrs.dep(), default = None), + "aab": attrs.option(attrs.dep(), default = None, doc = """ + The input `android_binary()` rule. The path to the AAB can be + accessed with the `$AAB` shell variable. Only one of `apk` or + `aab` can be provided. + """), "cacheable": attrs.option(attrs.bool(), default = None), "contacts": attrs.list(attrs.string(), default = []), "default_host_platform": attrs.option(attrs.configuration_label(), default = None), @@ -1157,7 +1176,7 @@ keystore = prelude_rule( docs = """ A `keystore()` contains the data for a key pair created by the `keytool` executable that comes - with the JDK. This is a required input for an `android\\_binary()`rule. + with the JDK. This is a required input for an `android_binary()` rule. """, examples = None, further = None, @@ -1319,7 +1338,7 @@ prebuilt_native_library = prelude_rule( (i.e., `.so` files) for Android. """, examples = """ - Most of the time, a `prebuilt_native_library` is private to the `android\\_library()`that uses it: + Most of the time, a `prebuilt_native_library` is private to the `android_library()` that uses it: ``` @@ -1384,13 +1403,14 @@ robolectric_test = prelude_rule( further = None, attrs = ( # @unsorted-dict-items + buck.inject_test_env_arg() | { "robolectric_runtime_dependency": attrs.option(attrs.source(), default = None, doc = """ Robolectric only runs in offline mode with buck. Specify the relative directory containing all the jars Robolectric uses at runtime. 
"""), "robolectric_manifest": attrs.source(doc = """ - An [Android Manifest](http://developer.android.com/guide/topics/manifest/manifest-intro.html) for the rule to declare any permissions or intents it may need or want to handle. May either be a file or a `android\\_manifest()`target. + An [Android Manifest](http://developer.android.com/guide/topics/manifest/manifest-intro.html) for the rule to declare any permissions or intents it may need or want to handle. May either be a file or an `android_manifest()` target. """), "extra_kotlinc_arguments": attrs.list(attrs.string(), default = [], doc = """ List of additional arguments to pass into the Kotlin compiler. @@ -1409,13 +1429,11 @@ robolectric_test = prelude_rule( "exported_deps": attrs.list(attrs.dep(), default = []), "exported_provided_deps": attrs.list(attrs.dep(), default = []), "extra_arguments": attrs.list(attrs.string(), default = []), - "extra_non_source_only_abi_kotlinc_arguments": attrs.list(attrs.string(), default = []), "fork_mode": attrs.enum(ForkMode, default = "none"), "friend_paths": attrs.list(attrs.dep(), default = []), + "jar_postprocessor": attrs.option(attrs.exec_dep(), default = None), "java_version": attrs.option(attrs.string(), default = None), "java": attrs.option(attrs.dep(), default = None), - "javac": attrs.option(attrs.source(), default = None), - "kotlin_compiler_plugins": attrs.dict(key = attrs.source(), value = attrs.dict(key = attrs.string(), value = attrs.string(), sorted = False), sorted = False, default = {}), "labels": attrs.list(attrs.string(), default = []), "language": attrs.option(attrs.enum(JvmLanguage), default = None), "licenses": attrs.list(attrs.source(), default = []), @@ -1426,7 +1444,6 @@ robolectric_test = prelude_rule( "maven_coords": attrs.option(attrs.string(), default = None), "never_mark_as_unused_dependency": attrs.option(attrs.bool(), default = None), "on_unused_dependencies": attrs.option(attrs.enum(UnusedDependenciesAction), default = None), - "plugins": attrs.list(attrs.dep(), default = []), "preferred_density_for_binary_resources": attrs.option(attrs.string(), default = None), "proguard_config": attrs.option(attrs.source(), default = None), "provided_deps": attrs.list(attrs.dep(), default = []), @@ -1456,8 +1473,12 @@ robolectric_test = prelude_rule( "used_as_dependency_deprecated_do_not_use": attrs.bool(default = False), "use_jvm_abi_gen": attrs.option(attrs.bool(), default = None), "vm_args": attrs.list(attrs.arg(), default = []), - "_wip_java_plugin_arguments": attrs.dict(attrs.label(), attrs.list(attrs.string()), default = {}), - } | jvm_common.k2() | + } | + jvm_common.k2() | + jvm_common.incremental() | + jvm_common.plugins() | + jvm_common.kotlin_compiler_plugins() | + jvm_common.javac() | re_test_common.test_args() ), ) diff --git a/prelude/decls/common.bzl b/prelude/decls/common.bzl index e3e839bb04c..b598801c57a 100644 --- a/prelude/decls/common.bzl +++ b/prelude/decls/common.bzl @@ -20,7 +20,7 @@ prelude_rule = record( further = field([str, None], None), attrs = field(dict[str, Attr]), impl = field([typing.Callable, None], None), - uses_plugins = field([list["PluginKind"], None], None), + uses_plugins = field([list[plugins.PluginKind], None], None), ) AbiGenerationMode = ["unknown", "class", "source", "migrating_to_source_only", "source_only", "unrecognized"] @@ -39,8 +39,6 @@ IncludeType = ["local", "system", "raw"] LinkableDepType = ["static", "static_pic", "shared"] -Linkage = ["any", "static", "shared"] - LogLevel = ["off", "severe", "warning", "info", "config", 
"fine", "finer", "finest", "all"] OnDuplicateEntry = ["fail", "overwrite", "append"] @@ -49,8 +47,6 @@ SourceAbiVerificationMode = ["off", "log", "fail"] TestType = ["junit", "junit5", "testng"] -Traversal = ["tree", "node", "subfolders"] - UnusedDependenciesAction = ["unknown", "fail", "warn", "ignore", "unrecognized"] def _name_arg(name_type): @@ -129,7 +125,7 @@ def _platform_deps_arg(): def _labels_arg(): return { "labels": attrs.list(attrs.string(), default = [], doc = """ - Set of arbitrary strings which allow you to annotate a `build rule`with tags + Set of arbitrary strings which allow you to annotate a `build rule` with tags that can be searched for over an entire dependency tree using `buck query()` . """), @@ -217,6 +213,15 @@ def _allow_cache_upload_arg(): ), } +def _inject_test_env_arg(): + return { + # NOTE: We make this a `dep` not an `exec_dep` even though we'll execute + # it, because it needs to execute in the same platform as the test itself + # (we run tests in the target platform not the exec platform, since the + # goal is to test the code that is being built!). + "_inject_test_env": attrs.default_only(attrs.dep(default = "prelude//test/tools:inject_test_env")), + } + buck = struct( name_arg = _name_arg, deps_query_arg = _deps_query_arg, @@ -233,4 +238,5 @@ buck = struct( test_rule_timeout_ms = _test_rule_timeout_ms, target_os_type_arg = _target_os_type_arg, allow_cache_upload_arg = _allow_cache_upload_arg, + inject_test_env_arg = _inject_test_env_arg, ) diff --git a/prelude/decls/core_rules.bzl b/prelude/decls/core_rules.bzl index 3fe84354650..a7569c7065f 100644 --- a/prelude/decls/core_rules.bzl +++ b/prelude/decls/core_rules.bzl @@ -54,7 +54,7 @@ command_alias = prelude_rule( You can reference a `command_alias` target in - the `cmd` parameter of a `genrule()`by + the `cmd` parameter of a `genrule()` by using the `exe` macro: @@ -158,9 +158,12 @@ command_alias = prelude_rule( attrs = ( # @unsorted-dict-items { - "exe": attrs.option(attrs.dep(), default = None, doc = """ - A `build target`for a rule that outputs - an executable, such as an `sh\\_binary()`. + # Match `dep` before `source` so that we can support extracting the + # `RunInfo` provider of it, if one exists. + "exe": attrs.option(attrs.one_of(attrs.dep(), attrs.source()), default = None, doc = """ + A `build target` for a rule that outputs + an executable, such as an `sh_binary()`, + or an executable source file. """), "platform_exe": attrs.dict(key = attrs.enum(Platform), value = attrs.dep(), sorted = False, default = {}, doc = """ A mapping from platforms to `build target`. @@ -297,10 +300,6 @@ constraint_value = prelude_rule( export_file = prelude_rule( name = "export_file", docs = """ - **Warning:** this build rule is deprecated for folders. - Use `filegroup()`instead. It is still supported for individual files. - - An `export_file()` takes a single file or folder and exposes it so other rules can use it. """, @@ -577,11 +576,12 @@ genrule = prelude_rule( genrule_common.bash_arg() | genrule_common.cmd_exe_arg() | genrule_common.type_arg() | + genrule_common.weight_arg() | { "out": attrs.option(attrs.string(), default = None, doc = """ The name of the output file or directory. The complete path to this argument is provided to the shell command through - the `OUT` environment variable. Only one of`out` + the `OUT` environment variable. Only one of `out` or `outs` may be present. 
"""), "outs": attrs.option(attrs.dict(key = attrs.string(), value = attrs.set(attrs.string(), sorted = False), sorted = False), default = None, doc = """ @@ -660,8 +660,8 @@ genrule = prelude_rule( ``` is not. """), - "env": attrs.dict(key = attrs.string(), value = attrs.arg(), sorted = False, default = {}), } | + genrule_common.env_arg() | genrule_common.environment_expansion_separator() | { "enable_sandbox": attrs.option(attrs.bool(), default = None, doc = """ @@ -696,7 +696,7 @@ http_archive = prelude_rule( An `http_archive()` rule is used to download and extract archives from the Internet to be used as dependencies for other rules. These rules are downloaded by running `fetch`, or can be downloaded as part of - `build`by setting `.buckconfig` + `build` by setting `.buckconfig` """, examples = """ Using `http_archive()`, third party packages can be downloaded from @@ -796,9 +796,9 @@ http_file = prelude_rule( docs = """ An `http_file()` rule is used to download files from the Internet to be used as dependencies for other rules. This rule only downloads single files, and can - optionally make them executable (see `http\\_file()executable`) + optionally make them executable (see `http_file()executable`) These rules are downloaded by running `fetch`, or can - be downloaded as part of `build`by setting `.buckconfig` + be downloaded as part of `build` by setting `.buckconfig` """, examples = """ Using `http_file()`, third party packages can be downloaded from @@ -877,7 +877,7 @@ http_file = prelude_rule( """), "executable": attrs.option(attrs.bool(), default = None, doc = """ Whether or not the file should be made executable after downloading. If true, - this can also be used via `run`and the + this can also be used via `run` and the `$(exe )` `string parameter macros` """), "contacts": attrs.list(attrs.string(), default = []), @@ -926,7 +926,7 @@ remote_file = prelude_rule( ``` Here's an example of a `remote_file()` using a `mvn` URL being referenced - by a `prebuilt\\_jar()`. + by a `prebuilt_jar()`. ``` @@ -1356,8 +1356,8 @@ worker_tool = prelude_rule( # @unsorted-dict-items { "exe": attrs.option(attrs.dep(), default = None, doc = """ - A `build target`for a rule that outputs - an executable, such as an `sh\\_binary()`. + A `build target` for a rule that outputs + an executable, such as an `sh_binary()`. Buck runs this executable only once per build. 
"""), "args": attrs.one_of(attrs.arg(), attrs.list(attrs.arg()), default = [], doc = """ diff --git a/prelude/decls/cxx_rules.bzl b/prelude/decls/cxx_rules.bzl index 8db1779ee02..77953754875 100644 --- a/prelude/decls/cxx_rules.bzl +++ b/prelude/decls/cxx_rules.bzl @@ -10,8 +10,11 @@ # the generated docs, and so those should be verified to be accurate and # well-formatted (and then delete this TODO) -load(":apple_common.bzl", "apple_common") -load(":common.bzl", "CxxRuntimeType", "CxxSourceType", "HeadersAsRawHeadersMode", "LinkableDepType", "Linkage", "Traversal", "buck", "prelude_rule") +load("@prelude//apple:apple_common.bzl", "apple_common") +load("@prelude//cxx:link_groups_types.bzl", "LINK_GROUP_MAP_ATTR") +load("@prelude//linking:link_info.bzl", "LinkStyle") +load("@prelude//linking:types.bzl", "Linkage") +load(":common.bzl", "CxxRuntimeType", "CxxSourceType", "HeadersAsRawHeadersMode", "buck", "prelude_rule") load(":cxx_common.bzl", "cxx_common") load(":genrule_common.bzl", "genrule_common") load(":native_common.bzl", "native_common") @@ -117,7 +120,7 @@ cxx_binary = prelude_rule( "licenses": attrs.list(attrs.source(), default = []), "link_deps_query_whole": attrs.bool(default = False), "link_group": attrs.option(attrs.string(), default = None), - "link_group_map": attrs.option(attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal), attrs.option(attrs.string()))))), default = None), + "link_group_map": LINK_GROUP_MAP_ATTR, "platform_deps": attrs.list(attrs.tuple(attrs.regex(), attrs.set(attrs.dep(), sorted = True)), default = []), "post_linker_flags": attrs.list(attrs.arg(anon_target_compatible = True), default = []), "post_platform_linker_flags": attrs.list(attrs.tuple(attrs.regex(), attrs.list(attrs.arg(anon_target_compatible = True))), default = []), @@ -127,6 +130,7 @@ cxx_binary = prelude_rule( "thin_lto": attrs.bool(default = False), "version_universe": attrs.option(attrs.string(), default = None), "weak_framework_names": attrs.list(attrs.string(), default = []), + "use_header_units": attrs.bool(default = False), } | buck.allow_cache_upload_arg() ), @@ -147,7 +151,7 @@ cxx_genrule = prelude_rule( the settings in `.buckconfig` and `.buckconfig.local`, and the result of various command-line overrides specified through - the `common\\_parameters`command-line option. + the `common_parameters` command-line option. This information is available only @@ -296,7 +300,7 @@ cxx_genrule = prelude_rule( Additionally, if you embed these paths in a shell script, you should - execute that script using the `sh\\_binary()`rule and include + execute that script using the `sh_binary()` rule and include the targets for these paths in the `resources` argument of that `sh_binary` rule. These are the same targets that you pass to the string parameter macros. @@ -362,7 +366,9 @@ cxx_genrule = prelude_rule( genrule_common.bash_arg() | genrule_common.cmd_exe_arg() | genrule_common.type_arg() | + genrule_common.weight_arg() | genrule_common.out_arg() | + genrule_common.env_arg() | genrule_common.environment_expansion_separator() | { "enable_sandbox": attrs.option(attrs.bool(), default = None, doc = """ @@ -391,13 +397,13 @@ cxx_library = prelude_rule( Whether a Buck command builds the `cxx_library` is determined by the inclusion of a top-level target, such as - a `cxx\\_binary()`or `android\\_binary()`, that + a `cxx_binary()` or `android_binary()`, that transitively depends on the `cxx_library`. 
The set of targets specified to the Buck command (`buck build`, `buck run`, etc) must include one of these top-level targets in order for Buck to build the `cxx_library`. Note that you could specify the top-level target - implicitly using a `build target pattern`or you could also specify - the top-level target using an buckconfig#`alias`defined in `.buckconfig`. + implicitly using a `build target pattern` or you could also specify + the top-level target using a buckconfig `alias` defined in `.buckconfig`. *How* Buck builds the library also depends on the specified top-level target. @@ -408,10 +414,10 @@ cxx_library = prelude_rule( #### Dependencies of the cxx\\_library also require a top-level target Similarly, in order for Buck to build a target that - the `cxx_library` depends on, such as a `cxx\\_genrule()`, + the `cxx_library` depends on, such as a `cxx_genrule()`, you must specify in the Buck command a top-level target that depends on the `cxx_library`. For example, you could specify - to `build`a `cxx_binary` that + to `build` a `cxx_binary` that depends on the `cxx_library`. If you specify as your build target the `cxx_library` itself, the build targets that the `cxx_library` depends on *might not be built*. @@ -517,17 +523,15 @@ cxx_library = prelude_rule( cxx_common.exported_post_platform_linker_flags_arg() | native_common.link_style() | native_common.link_whole(link_whole_type = attrs.option(attrs.bool(), default = None)) | + native_common.soname() | cxx_common.raw_headers_arg() | cxx_common.include_directories_arg() | cxx_common.public_include_directories_arg() | cxx_common.public_system_include_directories_arg() | { - "soname": attrs.option(attrs.string(), default = None, doc = """ - Sets the soname ("shared object name") of any shared library produced from this rule. - The default value is based on the full rule name. - The macro `$(ext)` will be replaced with a platform-appropriate extension. - An argument can be provided, which is a library version. - For example `soname = 'libfoo.$(ext 2.3)'` will be `libfoo.2.3.dylib` on Mac and `libfoo.so.2.3` on Linux. + "deffile": attrs.option(attrs.source(), default = None, doc = """ + Specifies the *.def file used on Windows to modify a DLL's exports in place of explicit `__declspec(dllexport)` declarations. + The default is to not use a deffile.
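A sketch under assumed names (the contents of the `.def` file are not shown here):

```
cxx_library(
    name = "mydll",
    srcs = ["api.cpp"],
    preferred_linkage = "shared",
    # Hypothetical module-definition file listing the exported symbols.
    deffile = "mydll.def",
)
```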
"""), "used_by_wrap_script": attrs.bool(default = False, doc = """ When using an exopackage @@ -542,13 +546,15 @@ cxx_library = prelude_rule( } | cxx_common.supported_platforms_regex_arg() | cxx_common.force_static(force_static_type = attrs.option(attrs.bool(), default = None)) | - native_common.preferred_linkage(preferred_linkage_type = attrs.option(attrs.enum(Linkage), default = None)) | + native_common.preferred_linkage(preferred_linkage_type = attrs.option(attrs.enum(Linkage.values()), default = None)) | cxx_common.reexport_all_header_dependencies_arg() | cxx_common.exported_deps_arg() | cxx_common.exported_platform_deps_arg() | cxx_common.precompiled_header_arg() | apple_common.extra_xcode_sources() | apple_common.extra_xcode_files() | + apple_common.uses_explicit_modules_arg() | + apple_common.meta_apple_library_validation_enabled_arg() | { "bridging_header": attrs.option(attrs.source(), default = None), "can_be_asset": attrs.option(attrs.bool(), default = None), @@ -570,7 +576,7 @@ cxx_library = prelude_rule( "libraries": attrs.list(attrs.string(), default = []), "licenses": attrs.list(attrs.source(), default = []), "link_group": attrs.option(attrs.string(), default = None), - "link_group_map": attrs.option(attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal), attrs.option(attrs.string()))))), default = None), + "link_group_map": LINK_GROUP_MAP_ATTR, "module_name": attrs.option(attrs.string(), default = None), "platform_deps": attrs.list(attrs.tuple(attrs.regex(), attrs.set(attrs.dep(), sorted = True)), default = []), "post_linker_flags": attrs.list(attrs.arg(anon_target_compatible = True), default = []), @@ -583,11 +589,39 @@ cxx_library = prelude_rule( "thin_lto": attrs.bool(default = False), "use_archive": attrs.option(attrs.bool(), default = None), "uses_cxx_explicit_modules": attrs.bool(default = False), - "uses_explicit_modules": attrs.bool(default = False), "version_universe": attrs.option(attrs.string(), default = None), "weak_framework_names": attrs.list(attrs.string(), default = []), - "xcode_private_headers_symlinks": attrs.option(attrs.bool(), default = None), - "xcode_public_headers_symlinks": attrs.option(attrs.bool(), default = None), + "use_header_units": attrs.bool(default = False, doc = """ + If True, makes any header unit exported by a dependency (including + recursively) through export_header_unit available to the compiler. If + false, the compilation ignores header units, regardless of what is + exported by dependencies. + """), + "export_header_unit": attrs.option(attrs.enum(["include", "preload"]), default = None, doc = """ + If not None, export a C++20 header unit visible to dependants (including + recursively) with use_header_units set to True. + + "include": replace includes of each file in exported_headers or + raw_headers with an import of the precompiled header unit; files + that do not include any of those headers do not load the header + unit. + + "preload": automatically load the precompiled header unit in any + dependant that uses header units. + """), + "export_header_unit_filter": attrs.list(attrs.string(), default = [], doc = """ + A list of regexes. Each regex should match a set of headers in + exported_headers or raw_headers to be precompiled together into one + C++20 header unit. + + When used with export_header_unit="include", this allows different + subsets of headers to be loaded only by files that use them. Each group + should only depend on headers in previous groups. 
+ + If a header is not matched by any group, it is not precompiled and will + be included textually. If no filter is specified, the rule excludes + inline headers based on a name heuristic (e.g. "-inl.h"). + """), } | buck.allow_cache_upload_arg() ), ) @@ -598,7 +632,7 @@ cxx_precompiled_header = prelude_rule( docs = """ A `cxx_precompiled_header` rule specifies a single header file that can be precompiled and made available for use in other build rules such as - a `cxx\\_library()`or a `cxx\\_binary()`. + a `cxx_library()` or a `cxx_binary()`. This header file is precompiled by the preprocessor on behalf of the @@ -769,6 +803,11 @@ windows_resource = prelude_rule( further = None, attrs = ( cxx_common.srcs_arg() | + cxx_common.headers_arg() | + cxx_common.platform_headers_arg() | + cxx_common.header_namespace_arg() | + cxx_common.raw_headers_arg() | + cxx_common.include_directories_arg() | { "labels": attrs.list(attrs.string(), default = []), } @@ -797,6 +836,7 @@ cxx_test = prelude_rule( further = None, attrs = ( # @unsorted-dict-items + buck.inject_test_env_arg() | cxx_common.srcs_arg() | cxx_common.headers_arg() | cxx_common.preprocessor_flags_arg() | @@ -854,6 +894,7 @@ cxx_test = prelude_rule( buck.test_rule_timeout_ms() | native_common.link_group_deps() | native_common.link_group_public_deps_label() | + native_common.link_style() | { "additional_coverage_targets": attrs.list(attrs.source(), default = []), "contacts": attrs.list(attrs.string(), default = []), @@ -878,8 +919,7 @@ cxx_test = prelude_rule( "licenses": attrs.list(attrs.source(), default = []), "link_deps_query_whole": attrs.bool(default = False), "link_group": attrs.option(attrs.string(), default = None), - "link_group_map": attrs.option(attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal), attrs.option(attrs.string()))))), default = None), - "link_style": attrs.option(attrs.enum(LinkableDepType), default = None), + "link_group_map": LINK_GROUP_MAP_ATTR, "linker_extra_outputs": attrs.list(attrs.string(), default = []), "platform_compiler_flags": attrs.list(attrs.tuple(attrs.regex(), attrs.list(attrs.arg())), default = []), "platform_deps": attrs.list(attrs.tuple(attrs.regex(), attrs.set(attrs.dep(), sorted = True)), default = []), @@ -947,6 +987,7 @@ cxx_toolchain = prelude_rule( "debug_path_prefix_map_sanitizer_format": attrs.option(attrs.string(), default = None), "default_host_platform": attrs.option(attrs.configuration_label(), default = None), "detailed_untracked_header_messages": attrs.bool(default = False), + "dist_thin_lto_codegen_flags": attrs.list(attrs.arg(), default = []), "filepath_length_limited": attrs.bool(default = False), "headers_as_raw_headers_mode": attrs.option(attrs.enum(HeadersAsRawHeadersMode), default = None), "headers_whitelist": attrs.list(attrs.string(), default = []), @@ -957,13 +998,19 @@ cxx_toolchain = prelude_rule( "labels": attrs.list(attrs.string(), default = []), "licenses": attrs.list(attrs.source(), default = []), "link_path_normalization_args_enabled": attrs.bool(default = False), - "link_style": attrs.string(default = "static"), + "link_style": attrs.enum( LinkStyle.values(), default = "static", doc = """ + The default value of the `link_style` attribute for rules that use this toolchain.
+ """, + ), "linker": attrs.source(), "linker_flags": attrs.list(attrs.arg(anon_target_compatible = True), default = []), "linker_type": attrs.enum(LinkerProviderType), "nm": attrs.source(), "objcopy_for_shared_library_interface": attrs.source(), - "objcopy_recalculates_layout": attrs.bool(default = False), + "objdump": attrs.option(attrs.source(), default = None), "object_file_extension": attrs.string(default = ""), "pic_type_for_shared_linking": attrs.enum(PicType, default = "pic"), "post_linker_flags": attrs.list(attrs.arg(anon_target_compatible = True), default = []), @@ -1112,7 +1159,7 @@ prebuilt_cxx_library = prelude_rule( cxx_common.exported_platform_preprocessor_flags_arg() | cxx_common.exported_linker_flags_arg() | cxx_common.force_static(force_static_type = attrs.bool(default = False)) | - native_common.preferred_linkage(preferred_linkage_type = attrs.option(attrs.enum(Linkage), default = None)) | + native_common.preferred_linkage(preferred_linkage_type = attrs.option(attrs.enum(Linkage.values()), default = None)) | cxx_common.exported_deps_arg() | cxx_common.exported_platform_deps_arg() | cxx_common.supports_merged_linking() | @@ -1121,12 +1168,17 @@ prebuilt_cxx_library = prelude_rule( "can_be_asset": attrs.bool(default = False), "contacts": attrs.list(attrs.string(), default = []), "default_host_platform": attrs.option(attrs.configuration_label(), default = None), + "deffile": attrs.option(attrs.source(), default = None, doc = """ + Specifies the *.def file used on windows to modify a dll's exports in place of explicit `__declspec(dllexport)` declarations. + The default is to not use a defile. + """), "deps": attrs.list(attrs.dep(), default = []), "exported_lang_platform_preprocessor_flags": attrs.dict(key = attrs.enum(CxxSourceType), value = attrs.list(attrs.tuple(attrs.regex(), attrs.list(attrs.arg()))), sorted = False, default = {}), "exported_lang_preprocessor_flags": attrs.dict(key = attrs.enum(CxxSourceType), value = attrs.list(attrs.arg()), sorted = False, default = {}), "exported_platform_linker_flags": attrs.list(attrs.tuple(attrs.regex(), attrs.list(attrs.arg(anon_target_compatible = True))), default = []), "exported_post_linker_flags": attrs.list(attrs.arg(anon_target_compatible = True), default = []), "exported_post_platform_linker_flags": attrs.list(attrs.tuple(attrs.regex(), attrs.list(attrs.arg(anon_target_compatible = True))), default = []), + "extract_soname": attrs.bool(default = False), "frameworks": attrs.list(attrs.string(), default = []), "import_lib": attrs.option(attrs.source(), default = None), "include_in_android_merge_map_output": attrs.bool(default = True), @@ -1136,6 +1188,7 @@ prebuilt_cxx_library = prelude_rule( "link_whole": attrs.bool(default = False), "link_without_soname": attrs.bool(default = False), "platform_import_lib": attrs.option(attrs.list(attrs.tuple(attrs.regex(), attrs.source())), default = None), + "prestripped": attrs.bool(default = False, doc = "When set, skips running `strip` commands when building this library."), "provided": attrs.bool(default = False), "soname": attrs.option(attrs.string(), default = None), "supports_shared_library_interface": attrs.bool(default = True), @@ -1252,7 +1305,7 @@ prebuilt_cxx_library_group = prelude_rule( "default_host_platform": attrs.option(attrs.configuration_label(), default = None), "deps": attrs.list(attrs.dep(), default = []), "import_libs": attrs.dict(key = attrs.string(), value = attrs.source(), sorted = False, default = {}), - "include_dirs": attrs.list(attrs.source(), default = []), 
+ "include_dirs": attrs.list(attrs.source(allow_directory = True), default = []), "include_in_android_merge_map_output": attrs.bool(default = True), "labels": attrs.list(attrs.string(), default = []), "licenses": attrs.list(attrs.source(), default = []), diff --git a/prelude/decls/erlang_rules.bzl b/prelude/decls/erlang_rules.bzl index dbee8c448bd..4ee634bb9e7 100644 --- a/prelude/decls/erlang_rules.bzl +++ b/prelude/decls/erlang_rules.bzl @@ -88,7 +88,7 @@ rules_attributes = { This attribute controls if the output of the builds also create edoc chunks. """), "env": attrs.option(attrs.dict(key = attrs.string(), value = attrs.string()), default = None, doc = """ - The `env` field allows to set the application env variables. The key value pairs will materialise in tha applications `.app` + The `env` field allows to set the application env variables. The key value pairs will materialise in the application's `.app` file and can then be accessed by [`application:get_env/2`](https://www.erlang.org/doc/man/application.html#get_env-2). """), "erl_opts": attrs.option(attrs.list(attrs.string()), default = None, doc = """ @@ -122,7 +122,7 @@ rules_attributes = { applications `.app` file and can be accessed by `file:consult/1`. """), "include_src": attrs.bool(default = True, doc = """ - This field controlls if the generated application directory contains a src/ directory with the Erlang code or not. + This field controls if the generated application directory contains a src/ directory with the Erlang code or not. """), "includes": attrs.list(attrs.source(), default = [], doc = """ The public header files accessible via `-include_lib("appname/include/header.hrl")` from other erlang files. @@ -133,9 +133,9 @@ rules_attributes = { of the corresponding Erlang terms. """), "peek_private_includes": attrs.bool(default = False, doc = """ - This attribute allows you to use the private includes of the applictions dependencies. This can be useful for + This attribute allows you to use the private includes of the application's dependencies. This can be useful for test applications, to create shared abstractions for tests. It's not advisable to use this attribute for prodution - code. All private inclues transitively must be non-ambiguous. + code. All private includes transitively must be non-ambiguous. """), "resources": attrs.list(attrs.dep(), default = [], doc = """ The `resources` field specifies targets whose default output are placed in the applications `priv/` directory. For @@ -153,6 +153,12 @@ rules_attributes = { This field indicates if global parse_tranforms should be applied to this application as well. It often makes sense for third-party dependencies to not be subjected to global parse_transforms, similar to OTP applications. """), + "xrl_includefile": attrs.option(attrs.source(), default = None, doc = """ + Customised prologue file to replace the default. See [`includefile` option](https://www.erlang.org/doc/apps/parsetools/leex.html#file/2) for details. + """), + "yrl_includefile": attrs.option(attrs.source(), default = None, doc = """ + Customised prologue file to replace the default. See [`includefile` option](https://www.erlang.org/doc/apps/parsetools/yecc.html#file/2) for details. 
+ """), } | common_application_attributes, "erlang_app_includes": { "application_name": attrs.string(), @@ -160,6 +166,12 @@ rules_attributes = { "_toolchain": attrs.toolchain_dep(default = "toolchains//:erlang-default"), }, "erlang_escript": { + "bundled": attrs.bool(default = True, doc = """ + Setting bundled to `True` does generate a folder structure and escript trampoline instead of an archive. + """), + "configs": attrs.list(attrs.dep(), default = [], doc = """ + This attribute allows to set config files for the escript. The dependencies that are typically used + here are `export_file` targets."""), "deps": attrs.list(attrs.dep(), doc = """ List of Erlang applications that are bundled in the escript. This includes all transitive dependencies as well. """), @@ -173,7 +185,7 @@ rules_attributes = { `resources` field, the `priv` folders files can then be accessed by `escript"extract/2`. """), "main_module": attrs.option(attrs.string(), default = None, doc = """ - Overrides the default main module. Instead of defering the main module from the scripts filename, the specified module + Overrides the default main module. Instead of deferring the main module from the scripts filename, the specified module is used. That module needs to export a `main/1` function that is called as entry point. """), "resources": attrs.list(attrs.dep(), default = [], doc = """ @@ -206,11 +218,11 @@ rules_attributes = { [`permanent`](https://www.erlang.org/doc/man/application.html#type-restart_type). """), "include_erts": attrs.bool(default = False, doc = """ - This field controls wether OTP applications and the Erlang runtime system should be included as part of the release. + This field controls whether OTP applications and the Erlang runtime system should be included as part of the release. Please note, that at the moment the erts folder is just `erts/`. """), "multi_toolchain": attrs.option(attrs.list(attrs.dep()), default = None, doc = """ - This field controls wether the release should be built with a single toolchain, or multiple toolchains. In the + This field controls whether the release should be built with a single toolchain, or multiple toolchains. In the latter case, all output paths are prefixed with the toolchain name. """), "overlays": attrs.dict(key = attrs.string(), value = attrs.list(attrs.dep()), default = {}, doc = """ @@ -251,7 +263,7 @@ rules_attributes = { List of additional Common Test hooks. The strings are interpreted as Erlang terms. """), "extra_erl_flags": attrs.list(attrs.string(), default = [], doc = """ - List of additional command line arguments given to the erl command invocation. These + List of additional command line arguments given to the erl command invocation. These arguments are added to the front of the argument list. """), "preamble": attrs.string(default = read_root_config("erlang", "erlang_test_preamble", "test:info(),test:ensure_initialized(),test:start_shell()."), doc = """ @@ -269,17 +281,17 @@ rules_attributes = { "suite": attrs.source(doc = """ The source file for the test suite. If you are using the macro, you should use the `suites` attribute instead. - The suites attribtue specify which erlang_test targets should be generated. For each suite "path_to_suite/suite_SUITE.erl" an + The suites attribute specifies which erlang_test targets should be generated. For each suite "path_to_suite/suite_SUITE.erl" an implicit 'erlang_test' target suite_SUITE will be generated. 
"""), "_artifact_annotation_mfa": attrs.string(default = "artifact_annotations:default_annotation/1"), "_cli_lib": attrs.dep(default = "prelude//erlang/common_test/test_cli_lib:test_cli_lib"), "_ct_opts": attrs.string(default = read_root_config("erlang", "erlang_test_ct_opts", "")), "_providers": attrs.string(default = ""), - "_test_binary": attrs.dep(default = "prelude//erlang/common_test/test_binary:escript"), "_test_binary_lib": attrs.dep(default = "prelude//erlang/common_test/test_binary:test_binary"), "_toolchain": attrs.toolchain_dep(default = "toolchains//:erlang-default"), - "_trampoline": attrs.option(attrs.dep(), default = None), + "_trampoline": attrs.option(attrs.dep(), default = None, doc = "DEPRECATED. Use _trampolines instead."), + "_trampolines": attrs.option(attrs.list(attrs.dep()), default = None), } | common_shell_attributes | re_test_args(), } @@ -525,7 +537,7 @@ erlang_test = prelude_rule( For each suite `_SUITE.erl`, if a data_dir `_SUITE_data` is present along the suite, (as per [the data_dir naming scheme for ct](https://www.erlang.org/doc/apps/common_test/write_test_chapter.html#data-and-private-directories)), - it will automatically adds the coresponding resource target to the generated test target of the suite. + it will automatically adds the corresponding resource target to the generated test target of the suite. Resources will be placed in the [Data directory (data_dir)](https://www.erlang.org/doc/apps/common_test/write_test_chapter.html#data_priv_dir) of each of the suite. @@ -541,7 +553,7 @@ erlang_test = prelude_rule( of the tests. One can call - - `buck2 build //my_app:test_SUITE` to compile the test files together with its depedencies. + - `buck2 build //my_app:test_SUITE` to compile the test files together with its dependencies. - `buck2 test //my_app:other_test_SUITE` to run the test. - `buck2 run //my_app:other_test_SUITE` to open an interactive test shell, where tests can be run iteratively. diff --git a/prelude/decls/genrule_common.bzl b/prelude/decls/genrule_common.bzl index aa142a55841..d0eb117ecdc 100644 --- a/prelude/decls/genrule_common.bzl +++ b/prelude/decls/genrule_common.bzl @@ -88,70 +88,6 @@ def _cmd_arg(): A temporary directory which can be used for intermediate results and will not be bundled into the output. - - - ##### String parameter macros - - It is also possible to expand references to other rules within the - `cmd`, using builtin `string parameter macros` - . - All build rules expanded in the command are automatically considered - to be dependencies of the `genrule()`. - - - Note that the paths returned by these macros are *relative* paths. Using - relative paths ensures that your builds are *hermetic*, that - is, they are reproducible across different machine environments. - - - Additionally, if you embed these paths in a shell script, you should - execute that script using the `sh\\_binary()`rule and include - the targets for these paths in the `resources` argument of - that `sh_binary` rule. These are the same targets that you - pass to the string parameter macros. - - - `$(classpath //path/to:target)` - - - Expands to the transitive classpath of the specified build - rule, provided that the rule has a Java classpath. If the rule - does not have (or contribute to) a classpath, then an - exception is thrown and the build breaks. - - - `$(exe //path/to:target)` - - - Expands a build rule that results in an executable to the - commands necessary to run that executable. 
For example, - a `java_binary()` might expand to a call - to `java -jar path/to/target.jar` . Files that are - executable (perhaps generated by a `genrule()`) - are also expanded. If the build rule does not generate an - executable output, then an exception is thrown and the build - breaks. - - - `$(location //path/to:target)` - - - Expands to the location of the output of the specified build - rule. This means that you can refer to the output without - needing to be aware of how Buck is storing data on the disk - mid-build. - - - `$(maven_coords //path/to:target)` - - - Expands to the Maven coordinates for the specified build rule. - This allows you to access the Maven coordinates for - Maven-aware build rules. The format of the expansion is: - - ``` - - ``` """), } @@ -174,6 +110,13 @@ def _cmd_exe_arg(): """), } +def _weight_arg(): + return { + "weight": attrs.option(attrs.int(), default = None, doc = """ + How many local slots this genrule should take when executing locally. +"""), + } + def _out_arg(): return { "out": attrs.option(attrs.string(), default = None, doc = """ @@ -244,6 +187,13 @@ def _environment_expansion_separator(): """), } +def _env_arg(): + return { + "env": attrs.dict(key = attrs.string(), value = attrs.arg(), sorted = False, default = {}, doc = """ + A map of variables to be set in the environment where the shell command is run. +"""), + } + genrule_common = struct( srcs_arg = _srcs_arg, cmd_arg = _cmd_arg, @@ -251,5 +201,7 @@ cmd_exe_arg = _cmd_exe_arg, out_arg = _out_arg, type_arg = _type_arg, + weight_arg = _weight_arg, environment_expansion_separator = _environment_expansion_separator, + env_arg = _env_arg, ) diff --git a/prelude/decls/go_common.bzl b/prelude/decls/go_common.bzl index dc7042a3ec7..4c26077dc78 100644 --- a/prelude/decls/go_common.bzl +++ b/prelude/decls/go_common.bzl @@ -30,6 +30,16 @@ def _srcs_arg(): """), } +def _package_root_arg(): + return { + "package_root": attrs.option(attrs.string(), default = None, doc = """ + Sets the Go package directory (relative to the BUCK file). + By default (or if None is passed) package_root is detected automatically. + Use an empty string if the Go package is at the same level as the BUCK file; otherwise use the subdirectory name. + Example: for srcs = ["foo/bar.go"], package_root = "foo" +"""), + } + def _link_style_arg(): return { "link_style": attrs.option(attrs.enum(LinkableDepType), default = None, doc = """ @@ -48,13 +58,6 @@ def _link_mode_arg(): """), } -def _cgo_compiler_flags_arg(): - return { - "cgo_compiler_flags": attrs.list(attrs.string(), default = [], doc = """ - The set of additional compiler flags to pass to `go tool cgo`. -"""), - } - def _package_name_arg(): return { "package_name": attrs.option(attrs.string(), default = None, doc = """ @@ -88,8 +91,6 @@ def _external_linker_flags_arg(): return { "external_linker_flags": attrs.list(attrs.arg(), default = [], doc = """ Extra external linker flags passed to go link via `-extld` argument. - If argument is non-empty or `cgo_library` is used, the link mode - will switch to `external`. """), } @@ -127,8 +128,15 @@ def _embedcfg_arg(): def _cgo_enabled_arg(): return { "cgo_enabled": attrs.option(attrs.bool(), default = None, doc = """ - Experimental: Analog of CGO_ENABLED environment-variable. - None will be coverted to True if cxx_toolchain availabe for current configuration, otherwiese False. + Analog of the CGO_ENABLED env var; applies to this target and its dependencies. + If None, the `go_toolchain.default_cgo_enabled` value is applied.
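A minimal sketch (the target is hypothetical):

```
go_binary(
    name = "tool",
    srcs = ["main.go"],
    # Force cgo on for this target and its dependencies; leaving this
    # unset (None) falls back to go_toolchain.default_cgo_enabled.
    cgo_enabled = True,
)
```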
+"""), + } + +def _override_cgo_enabled_arg(): + return { + "override_cgo_enabled": attrs.option(attrs.bool(), default = None, doc = """ + Per-target analog of CGO_ENABLED env-var, overrides its value for the target, but not for its dependencies. """), } @@ -139,6 +147,13 @@ def _race_arg(): """), } +def _asan_arg(): + return { + "asan": attrs.bool(default = False, doc = """ + If true, enable ASAN. +"""), + } + def _tags_arg(): return { "tags": attrs.list(attrs.string(), default = [], doc = """ @@ -146,12 +161,34 @@ def _tags_arg(): """), } +def _cxx_compiler_flags_arg(): + return { + "cxx_compiler_flags": attrs.list(attrs.arg(), default = [], doc = """ + GCC/Clang flags to use when compiling any of the above C/C++ sources (which require compilation). +"""), + } + +def _cxx_preprocessor_flags_arg(): + return { + "cxx_preprocessor_flags": attrs.list(attrs.arg(), default = [], doc = """ + GCC/Clang flags to use when preprocessing any of the above C/C++ sources (which require preprocessing). +"""), + } + +def _generate_exported_header(): + return { + "generate_exported_header": attrs.bool(default = False, doc = """ + Generate header file with declaration for functions exported with `//export` + The header name for target `cell//foo/bar:lib` will be `foo/bar/lib.h` +"""), + } + go_common = struct( deps_arg = _deps_arg, srcs_arg = _srcs_arg, + package_root_arg = _package_root_arg, link_style_arg = _link_style_arg, link_mode_arg = _link_mode_arg, - cgo_compiler_flags_arg = _cgo_compiler_flags_arg, package_name_arg = _package_name_arg, compiler_flags_arg = _compiler_flags_arg, assembler_flags_arg = _assembler_flags_arg, @@ -159,6 +196,11 @@ go_common = struct( external_linker_flags_arg = _external_linker_flags_arg, embedcfg_arg = _embedcfg_arg, cgo_enabled_arg = _cgo_enabled_arg, + override_cgo_enabled_arg = _override_cgo_enabled_arg, race_arg = _race_arg, + asan_arg = _asan_arg, tags_arg = _tags_arg, + cxx_compiler_flags_arg = _cxx_compiler_flags_arg, + cxx_preprocessor_flags_arg = _cxx_preprocessor_flags_arg, + generate_exported_header = _generate_exported_header, ) diff --git a/prelude/decls/go_rules.bzl b/prelude/decls/go_rules.bzl index 3f56acaf0ba..c0b208aa72b 100644 --- a/prelude/decls/go_rules.bzl +++ b/prelude/decls/go_rules.bzl @@ -10,127 +10,15 @@ # the generated docs, and so those should be verified to be accurate and # well-formatted (and then delete this TODO) -load(":common.bzl", "CxxRuntimeType", "CxxSourceType", "HeadersAsRawHeadersMode", "Traversal", "buck", "prelude_rule") +load(":common.bzl", "buck", "prelude_rule") load(":cxx_common.bzl", "cxx_common") load(":go_common.bzl", "go_common") -load(":native_common.bzl", "native_common") load(":re_test_common.bzl", "re_test_common") BuildMode = ["executable", "c_shared", "c_archive"] GoTestCoverStepMode = ["set", "count", "atomic", "none"] -cgo_library = prelude_rule( - name = "cgo_library", - docs = """ - A cgo\\_library() rule builds an object from the supplied set of Go/C source files and - dependencies. The outputs are linked into go executable in the last step (compile). - - The 'go build' command would collect the cgo directives from the source files, however - with buck the flags needs to be passed in the cgo\\_library manually - - This rule borrows from `cxx\\_binary()`since C/C++ sources are being compiled. - """, - examples = """ - ``` - - # A rule that builds a Go native executable with linked cgo library based on - # C/C++ util library. 
- go_binary( - name = "bin", - srcs = ["main.go"], - deps = [":lib"] - ) - - cgo_library( - name = "lib", - srcs = ["cgo_source.go"], - deps = [":util"], - ) - - cxx_library( - name = "util", - srcs = ["util.c"], - headers = ["util.h"], - ) - - ``` - """, - further = None, - attrs = ( - # @unsorted-dict-items - go_common.package_name_arg() | - { - "srcs": attrs.list(attrs.one_of(attrs.source(), attrs.tuple(attrs.source(), attrs.list(attrs.arg()))), default = [], doc = """ - The set of source files to be compiled by this rule. .go files will be compiled with the CGO - compiler. Each file needs to have `import "C"` declared. - """), - "go_srcs": attrs.list(attrs.source(), default = [], doc = """ - The set of source files to be compiled by this rule. Go (`.go`) files are compiled with the Go - compiler. In contrast to the `srcs` argument, these files *cannot* have `import "C"` declared. - """), - } | - cxx_common.headers_arg() | - cxx_common.preprocessor_flags_arg() | - cxx_common.platform_preprocessor_flags_arg() | - go_common.cgo_compiler_flags_arg() | - go_common.embedcfg_arg() | - cxx_common.compiler_flags_arg() | - cxx_common.platform_compiler_flags_arg() | - cxx_common.linker_extra_outputs_arg() | - cxx_common.linker_flags_arg() | - cxx_common.platform_linker_flags_arg() | - native_common.link_style() | - cxx_common.raw_headers_arg() | - { - "go_compiler_flags": attrs.list(attrs.string(), default = [], doc = """ - The set of additional compiler flags to pass to `go tool compile`. - """), - "go_assembler_flags": attrs.list(attrs.string(), default = [], doc = """ - The set of additional assembler flags to pass to `go tool asm`. - """), - "contacts": attrs.list(attrs.string(), default = []), - "cxx_runtime_type": attrs.option(attrs.enum(CxxRuntimeType), default = None), - "default_host_platform": attrs.option(attrs.configuration_label(), default = None), - "default_platform": attrs.option(attrs.string(), default = None), - "defaults": attrs.dict(key = attrs.string(), value = attrs.string(), sorted = False, default = {}), - "deps": attrs.list(attrs.dep(), default = []), - "deps_query": attrs.option(attrs.query(), default = None), - "devirt_enabled": attrs.bool(default = False), - "executable_name": attrs.option(attrs.string(), default = None), - "exported_deps": attrs.list(attrs.dep(), default = []), - "fat_lto": attrs.bool(default = False), - "focused_list_target": attrs.option(attrs.dep(), default = None), - "frameworks": attrs.list(attrs.string(), default = []), - "header_namespace": attrs.option(attrs.string(), default = None), - "headers_as_raw_headers_mode": attrs.option(attrs.enum(HeadersAsRawHeadersMode), default = None), - "include_directories": attrs.set(attrs.string(), sorted = True, default = []), - "labels": attrs.list(attrs.string(), default = []), - "lang_compiler_flags": attrs.dict(key = attrs.enum(CxxSourceType), value = attrs.list(attrs.arg()), sorted = False, default = {}), - "lang_platform_compiler_flags": attrs.dict(key = attrs.enum(CxxSourceType), value = attrs.list(attrs.tuple(attrs.regex(), attrs.list(attrs.arg()))), sorted = False, default = {}), - "lang_platform_preprocessor_flags": attrs.dict(key = attrs.enum(CxxSourceType), value = attrs.list(attrs.tuple(attrs.regex(), attrs.list(attrs.arg()))), sorted = False, default = {}), - "lang_preprocessor_flags": attrs.dict(key = attrs.enum(CxxSourceType), value = attrs.list(attrs.arg()), sorted = False, default = {}), - "libraries": attrs.list(attrs.string(), default = []), - "licenses": attrs.list(attrs.source(), 
default = []), - "link_deps_query_whole": attrs.bool(default = False), - "link_group": attrs.option(attrs.string(), default = None), - "link_group_map": attrs.option(attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal), attrs.option(attrs.string()))))), default = None), - "platform_deps": attrs.list(attrs.tuple(attrs.regex(), attrs.set(attrs.dep(), sorted = True)), default = []), - "platform_headers": attrs.list(attrs.tuple(attrs.regex(), attrs.named_set(attrs.source(), sorted = True)), default = []), - "platform_srcs": attrs.list(attrs.tuple(attrs.regex(), attrs.set(attrs.one_of(attrs.source(), attrs.tuple(attrs.source(), attrs.list(attrs.arg()))), sorted = True)), default = []), - "post_linker_flags": attrs.list(attrs.arg(), default = []), - "post_platform_linker_flags": attrs.list(attrs.tuple(attrs.regex(), attrs.list(attrs.arg())), default = []), - "precompiled_header": attrs.option(attrs.source(), default = None), - "prefer_stripped_objects": attrs.bool(default = False), - "prefix_header": attrs.option(attrs.source(), default = None), - "thin_lto": attrs.bool(default = False), - "version_universe": attrs.option(attrs.string(), default = None), - "weak_framework_names": attrs.list(attrs.string(), default = []), - } | - buck.allow_cache_upload_arg() - ), -) - go_binary = prelude_rule( name = "go_binary", docs = """ @@ -184,9 +72,15 @@ go_binary = prelude_rule( go_common.linker_flags_arg() | go_common.external_linker_flags_arg() | go_common.embedcfg_arg() | + go_common.package_root_arg() | go_common.cgo_enabled_arg() | go_common.race_arg() | + go_common.asan_arg() | go_common.tags_arg() | + cxx_common.headers_arg() | + cxx_common.header_namespace_arg() | + go_common.cxx_preprocessor_flags_arg() | + go_common.cxx_compiler_flags_arg() | { "resources": attrs.list(attrs.source(), default = [], doc = """ Static files to be symlinked into the working directory of the test. You can access these in your @@ -197,7 +91,6 @@ go_binary = prelude_rule( "labels": attrs.list(attrs.string(), default = []), "licenses": attrs.list(attrs.source(), default = []), "platform": attrs.option(attrs.string(), default = None), - "platform_external_linker_flags": attrs.list(attrs.tuple(attrs.regex(), attrs.list(attrs.arg())), default = []), } ), ) @@ -222,13 +115,12 @@ go_exported_library = prelude_rule( deps = [":example"], ) - cgo_library( + go_library( name = "example", package_name = "cgo", srcs = [ "export-to-c.go", # file with //export annotations ], - cgo_compiler_flags = [], compiler_flags = [], headers = [], ) @@ -265,15 +157,22 @@ go_exported_library = prelude_rule( `gcflags`, `ldflags` and `asmflags`` """), } | + cxx_common.headers_arg() | + cxx_common.header_namespace_arg() | + go_common.cxx_preprocessor_flags_arg() | + go_common.cxx_compiler_flags_arg() | go_common.link_style_arg() | go_common.link_mode_arg() | go_common.compiler_flags_arg() | go_common.assembler_flags_arg() | go_common.linker_flags_arg() | go_common.external_linker_flags_arg() | + go_common.package_root_arg() | go_common.cgo_enabled_arg() | go_common.race_arg() | + go_common.asan_arg() | go_common.tags_arg() | + go_common.generate_exported_header() | { "resources": attrs.list(attrs.source(), default = [], doc = """ Static files to be symlinked into the working directory of the test. 
You can access these in your @@ -285,7 +184,6 @@ go_exported_library = prelude_rule( "labels": attrs.list(attrs.string(), default = []), "licenses": attrs.list(attrs.source(), default = []), "platform": attrs.option(attrs.string(), default = None), - "platform_external_linker_flags": attrs.list(attrs.tuple(attrs.regex(), attrs.list(attrs.arg())), default = []), } ), ) @@ -323,10 +221,18 @@ go_library = prelude_rule( go_common.compiler_flags_arg() | go_common.assembler_flags_arg() | go_common.embedcfg_arg() | + go_common.package_root_arg() | + go_common.override_cgo_enabled_arg() | + cxx_common.headers_arg() | + cxx_common.header_namespace_arg() | + go_common.cxx_preprocessor_flags_arg() | + go_common.cxx_compiler_flags_arg() | + go_common.external_linker_flags_arg() | + go_common.link_style_arg() | + go_common.generate_exported_header() | { "contacts": attrs.list(attrs.string(), default = []), "default_host_platform": attrs.option(attrs.configuration_label(), default = None), - "exported_deps": attrs.list(attrs.dep(), default = []), "labels": attrs.list(attrs.string(), default = []), "licenses": attrs.list(attrs.source(), default = []), } @@ -397,6 +303,7 @@ go_test = prelude_rule( further = None, attrs = ( # @unsorted-dict-items + buck.inject_test_env_arg() | go_common.srcs_arg() | { "library": attrs.option(attrs.dep(), default = None, doc = """ @@ -423,9 +330,15 @@ go_test = prelude_rule( go_common.linker_flags_arg() | go_common.external_linker_flags_arg() | go_common.embedcfg_arg() | + go_common.package_root_arg() | go_common.cgo_enabled_arg() | go_common.race_arg() | + go_common.asan_arg() | go_common.tags_arg() | + cxx_common.headers_arg() | + cxx_common.header_namespace_arg() | + go_common.cxx_preprocessor_flags_arg() | + go_common.cxx_compiler_flags_arg() | { "resources": attrs.list(attrs.source(), default = [], doc = """ Static files that are symlinked into the working directory of the @@ -452,72 +365,21 @@ go_test = prelude_rule( re_test_common.test_args() ), ) - -go_test_runner = prelude_rule( - name = "go_test_runner", - docs = "", - examples = None, - further = None, +go_bootstrap_binary = prelude_rule( + name = "go_bootstrap_binary", attrs = ( - # @unsorted-dict-items - { - "contacts": attrs.list(attrs.string(), default = []), - "default_host_platform": attrs.option(attrs.configuration_label(), default = None), - "labels": attrs.list(attrs.string(), default = []), - "licenses": attrs.list(attrs.source(), default = []), - "test_runner_generator": attrs.source(), - } - ), -) - -prebuilt_go_library = prelude_rule( - name = "prebuilt_go_library", - docs = """ - A prebuilt\\_go\\_library() rule provides a native library from the specified file. - """, - examples = """ - For more examples, check out our [integration tests](https://github.com/facebook/buck/tree/dev/test/com/facebook/buck/features/go/testdata). - - - ``` - - prebuilt_go_library( - name='greeting', - package_name='greeting', - library='greeting.a', - deps=[ - ':join', - ], - ) - - ``` - """, - further = None, - attrs = ( - # @unsorted-dict-items - { - "library": attrs.source(doc = """ - Path to the precompiled Go library - typically of the form 'foo.a'. 
- """), - } | - go_common.package_name_arg() | - go_common.deps_arg() | + go_common.srcs_arg() | { - "contacts": attrs.list(attrs.string(), default = []), - "default_host_platform": attrs.option(attrs.configuration_label(), default = None), - "exported_deps": attrs.list(attrs.dep(), default = []), - "labels": attrs.list(attrs.string(), default = []), - "licenses": attrs.list(attrs.source(), default = []), + "entrypoints": attrs.list(attrs.string(), default = [], doc = """Package name or file names"""), + "workdir": attrs.string(default = "", doc = """Change to subdir before running the command"""), } ), ) go_rules = struct( - cgo_library = cgo_library, go_binary = go_binary, + go_bootstrap_binary = go_bootstrap_binary, go_exported_library = go_exported_library, go_library = go_library, go_test = go_test, - go_test_runner = go_test_runner, - prebuilt_go_library = prebuilt_go_library, ) diff --git a/prelude/decls/groovy_rules.bzl b/prelude/decls/groovy_rules.bzl index 91f71860cc2..6152e1d76b3 100644 --- a/prelude/decls/groovy_rules.bzl +++ b/prelude/decls/groovy_rules.bzl @@ -11,6 +11,7 @@ # well-formatted (and then delete this TODO) load(":common.bzl", "ForkMode", "LogLevel", "SourceAbiVerificationMode", "TestType", "UnusedDependenciesAction", "prelude_rule") +load(":jvm_common.bzl", "jvm_common") groovy_library = prelude_rule( name = "groovy_library", @@ -70,22 +71,22 @@ groovy_library = prelude_rule( `.java`, cross compilation using the jdk found in `JAVA_HOME` will occur. """), "resources": attrs.list(attrs.source(), default = [], doc = """ - This is the same as in `java\\_library()`. + This is the same as in `java_library()`. """), "deps": attrs.list(attrs.dep(), default = [], doc = """ Rules (usually other `groovy_library` or ``java_library()`` rules) that are used to generate the classpath required to compile this `groovy_library`. - This is the same as in `java\\_library()`. + This is the same as in `java_library()`. """), "exported_deps": attrs.list(attrs.dep(), default = [], doc = """ Other `groovy_library` and ``java_library()`` rules that depend on this rule will also include its `exported_deps` in their classpaths. - This is the same as in `java\\_library()`. + This is the same as in `java_library()`. """), "provided_deps": attrs.list(attrs.dep(), default = [], doc = """ - This is the same as in `java\\_library()`. + This is the same as in `java_library()`. """), "extra_groovyc_arguments": attrs.list(attrs.string(), default = [], doc = """ List of additional arguments to pass into the Groovy compiler. @@ -93,22 +94,22 @@ groovy_library = prelude_rule( "source": attrs.option(attrs.string(), default = None, doc = """ Only used during cross compilation. - This is the same as in `java\\_library()`. + This is the same as in `java_library()`. """), "target": attrs.option(attrs.string(), default = None, doc = """ Only used during cross compilation. - This is the same as in `java\\_library()`. + This is the same as in `java_library()`. """), "java_version": attrs.option(attrs.string(), default = None, doc = """ Only used during cross compilation. - This is the same as in `java\\_library()`. + This is the same as in `java_library()`. """), "extra_arguments": attrs.list(attrs.string(), default = [], doc = """ Only used during cross compilation. - This is the same as in `java\\_library()`. + This is the same as in `java_library()`. 
"""), "annotation_processor_deps": attrs.list(attrs.dep(), default = []), "annotation_processor_params": attrs.list(attrs.string(), default = []), @@ -116,14 +117,12 @@ groovy_library = prelude_rule( "contacts": attrs.list(attrs.string(), default = []), "default_host_platform": attrs.option(attrs.configuration_label(), default = None), "exported_provided_deps": attrs.list(attrs.dep(), default = []), - "javac": attrs.option(attrs.source(), default = None), "labels": attrs.list(attrs.string(), default = []), "licenses": attrs.list(attrs.source(), default = []), "manifest_file": attrs.option(attrs.source(), default = None), "maven_coords": attrs.option(attrs.string(), default = None), "never_mark_as_unused_dependency": attrs.option(attrs.bool(), default = None), "on_unused_dependencies": attrs.option(attrs.enum(UnusedDependenciesAction), default = None), - "plugins": attrs.list(attrs.dep(), default = []), "proguard_config": attrs.option(attrs.source(), default = None), "remove_classes": attrs.list(attrs.regex(), default = []), "required_for_source_only_abi": attrs.bool(default = False), @@ -131,9 +130,8 @@ groovy_library = prelude_rule( "runtime_deps": attrs.list(attrs.dep(), default = []), "source_abi_verification_mode": attrs.option(attrs.enum(SourceAbiVerificationMode), default = None), "source_only_abi_deps": attrs.list(attrs.dep(), default = []), - "_wip_java_plugin_arguments": attrs.dict(attrs.label(), attrs.list(attrs.string()), default = {}), } - ), + ) | jvm_common.plugins() | jvm_common.javac(), ) groovy_test = prelude_rule( @@ -160,14 +158,12 @@ groovy_test = prelude_rule( "extra_groovyc_arguments": attrs.list(attrs.string(), default = []), "fork_mode": attrs.enum(ForkMode, default = "none"), "java_version": attrs.option(attrs.string(), default = None), - "javac": attrs.option(attrs.source(), default = None), "labels": attrs.list(attrs.string(), default = []), "licenses": attrs.list(attrs.source(), default = []), "manifest_file": attrs.option(attrs.source(), default = None), "maven_coords": attrs.option(attrs.string(), default = None), "never_mark_as_unused_dependency": attrs.option(attrs.bool(), default = None), "on_unused_dependencies": attrs.option(attrs.enum(UnusedDependenciesAction), default = None), - "plugins": attrs.list(attrs.dep(), default = []), "proguard_config": attrs.option(attrs.source(), default = None), "provided_deps": attrs.list(attrs.dep(), default = []), "remove_classes": attrs.list(attrs.regex(), default = []), @@ -189,9 +185,8 @@ groovy_test = prelude_rule( "use_cxx_libraries": attrs.option(attrs.bool(), default = None), "use_dependency_order_classpath": attrs.option(attrs.bool(), default = None), "vm_args": attrs.list(attrs.arg(), default = []), - "_wip_java_plugin_arguments": attrs.dict(attrs.label(), attrs.list(attrs.string()), default = {}), } - ), + ) | jvm_common.plugins() | jvm_common.javac(), ) groovy_rules = struct( diff --git a/prelude/decls/halide_rules.bzl b/prelude/decls/halide_rules.bzl index 52db23f1086..7ddda0ec7ca 100644 --- a/prelude/decls/halide_rules.bzl +++ b/prelude/decls/halide_rules.bzl @@ -10,7 +10,8 @@ # the generated docs, and so those should be verified to be accurate and # well-formatted (and then delete this TODO) -load(":common.bzl", "CxxRuntimeType", "CxxSourceType", "HeadersAsRawHeadersMode", "LinkableDepType", "Traversal", "prelude_rule") +load("@prelude//cxx:link_groups_types.bzl", "LINK_GROUP_MAP_ATTR") +load(":common.bzl", "CxxRuntimeType", "CxxSourceType", "HeadersAsRawHeadersMode", "LinkableDepType", "prelude_rule") 
load(":cxx_common.bzl", "cxx_common") halide_library = prelude_rule( @@ -98,7 +99,7 @@ halide_library = prelude_rule( "licenses": attrs.list(attrs.source(), default = []), "link_deps_query_whole": attrs.bool(default = False), "link_group": attrs.option(attrs.string(), default = None), - "link_group_map": attrs.option(attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal), attrs.option(attrs.string()))))), default = None), + "link_group_map": LINK_GROUP_MAP_ATTR, "link_style": attrs.option(attrs.enum(LinkableDepType), default = None), "linker_extra_outputs": attrs.list(attrs.string(), default = []), "platform_deps": attrs.list(attrs.tuple(attrs.regex(), attrs.set(attrs.dep(), sorted = True)), default = []), diff --git a/prelude/decls/haskell_common.bzl b/prelude/decls/haskell_common.bzl index 1cbb8c45774..8a8ee90ecae 100644 --- a/prelude/decls/haskell_common.bzl +++ b/prelude/decls/haskell_common.bzl @@ -20,7 +20,7 @@ def _srcs_arg(): def _deps_arg(): return { "deps": attrs.list(attrs.dep(), default = [], doc = """ - Either `haskell\\_library()`or `prebuilt\\_haskell\\_library()`rules + Either `haskell_library()` or `prebuilt_haskell_library()` rules from which this rules sources import modules or native linkable rules exporting symbols this rules sources call into. """), diff --git a/prelude/decls/haskell_rules.bzl b/prelude/decls/haskell_rules.bzl index 64de2950773..0233dcdd4a1 100644 --- a/prelude/decls/haskell_rules.bzl +++ b/prelude/decls/haskell_rules.bzl @@ -10,7 +10,8 @@ # the generated docs, and so those should be verified to be accurate and # well-formatted (and then delete this TODO) -load(":common.bzl", "LinkableDepType", "Linkage", "buck", "prelude_rule") +load("@prelude//linking:types.bzl", "Linkage") +load(":common.bzl", "LinkableDepType", "buck", "prelude_rule") load(":haskell_common.bzl", "haskell_common") load(":native_common.bzl", "native_common") @@ -166,7 +167,7 @@ haskell_library = prelude_rule( haskell_common.deps_arg() | buck.platform_deps_arg() | native_common.link_whole(link_whole_type = attrs.bool(default = False)) | - native_common.preferred_linkage(preferred_linkage_type = attrs.enum(Linkage)) | + native_common.preferred_linkage(preferred_linkage_type = attrs.enum(Linkage.values())) | { "contacts": attrs.list(attrs.string(), default = []), "default_host_platform": attrs.option(attrs.configuration_label(), default = None), @@ -231,6 +232,7 @@ haskell_prebuilt_library = prelude_rule( } | haskell_common.exported_linker_flags_arg() | { + "exported_post_linker_flags": attrs.list(attrs.arg(anon_target_compatible = True), default = []), "contacts": attrs.list(attrs.string(), default = []), "cxx_header_dirs": attrs.list(attrs.source(), default = []), "db": attrs.source(), diff --git a/prelude/decls/ios_rules.bzl b/prelude/decls/ios_rules.bzl index 44fee26c4f9..e590b8c9dda 100644 --- a/prelude/decls/ios_rules.bzl +++ b/prelude/decls/ios_rules.bzl @@ -10,8 +10,18 @@ # the generated docs, and so those should be verified to be accurate and # well-formatted (and then delete this TODO) -load(":apple_common.bzl", "apple_common") -load(":common.bzl", "CxxRuntimeType", "CxxSourceType", "HeadersAsRawHeadersMode", "IncludeType", "Linkage", "Traversal", "buck", "prelude_rule") +load("@prelude//apple:apple_common.bzl", "apple_common") +load("@prelude//apple:apple_rules_impl_utility.bzl", "apple_dsymutil_attrs", "apple_test_extra_attrs", "get_apple_toolchain_attr") +load("@prelude//apple:apple_test_host_app_transition.bzl", 
"apple_test_host_app_transition") +load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolsInfo") +load("@prelude//apple:apple_universal_executable.bzl", "apple_universal_executable_impl") +load("@prelude//apple:cxx_universal_executable.bzl", "cxx_universal_executable_impl") +load("@prelude//apple:resource_groups.bzl", "RESOURCE_GROUP_MAP_ATTR") +load("@prelude//apple/user:cpu_split_transition.bzl", "cpu_split_transition") +load("@prelude//cxx:link_groups_types.bzl", "LINK_GROUP_MAP_ATTR") +load("@prelude//linking:types.bzl", "Linkage") +load("@prelude//decls/toolchains_common.bzl", "toolchains_common") +load(":common.bzl", "CxxRuntimeType", "CxxSourceType", "HeadersAsRawHeadersMode", "IncludeType", "LinkableDepType", "buck", "prelude_rule") load(":cxx_common.bzl", "cxx_common") load(":native_common.bzl", "native_common") @@ -85,7 +95,7 @@ apple_binary = prelude_rule( docs = """ An `apple_binary()` rule builds a native executable - such as an iOS or OSX app - from the supplied set of Objective-C/C++ source files and dependencies. It is similar to - a `cxx\\_binary()`rule with which it shares many attributes. In addition + a `cxx_binary()` rule with which it shares many attributes. In addition to those common attributes, `apple_binary()` has a some additional attributes that are specific to binaries intended to be built using the Apple toolchain. Note, however, that `apple_binary()` and `cxx_binary()` differ @@ -152,10 +162,11 @@ apple_binary = prelude_rule( apple_common.target_sdk_version() | apple_common.extra_xcode_sources() | apple_common.extra_xcode_files() | + apple_common.serialize_debugging_options_arg() | + apple_common.uses_explicit_modules_arg() | { "bridging_header": attrs.option(attrs.source(), default = None), "can_be_asset": attrs.option(attrs.bool(), default = None), - "configs": attrs.dict(key = attrs.string(), value = attrs.dict(key = attrs.string(), value = attrs.string(), sorted = False), sorted = False, default = {}), "contacts": attrs.list(attrs.string(), default = []), "cxx_runtime_type": attrs.option(attrs.enum(CxxRuntimeType), default = None), "default_host_platform": attrs.option(attrs.configuration_label(), default = None), @@ -193,7 +204,7 @@ apple_binary = prelude_rule( "libraries": attrs.list(attrs.string(), default = []), "licenses": attrs.list(attrs.source(), default = []), "link_group": attrs.option(attrs.string(), default = None), - "link_group_map": attrs.option(attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal), attrs.option(attrs.string()))))), default = None), + "link_group_map": LINK_GROUP_MAP_ATTR, "link_whole": attrs.option(attrs.bool(), default = None), "modular": attrs.bool(default = False), "module_name": attrs.option(attrs.string(), default = None), @@ -204,27 +215,24 @@ apple_binary = prelude_rule( "post_linker_flags": attrs.list(attrs.arg(), default = []), "post_platform_linker_flags": attrs.list(attrs.tuple(attrs.regex(), attrs.list(attrs.arg())), default = []), "precompiled_header": attrs.option(attrs.source(), default = None), - "preferred_linkage": attrs.option(attrs.enum(Linkage), default = None), + "preferred_linkage": attrs.option(attrs.enum(Linkage.values()), default = None), "prefix_header": attrs.option(attrs.source(), default = None), "public_include_directories": attrs.set(attrs.string(), sorted = True, default = []), "public_system_include_directories": attrs.set(attrs.string(), sorted = True, default = []), "raw_headers": attrs.set(attrs.source(), sorted = True, default = []), 
"reexport_all_header_dependencies": attrs.option(attrs.bool(), default = None), "sdk_modules": attrs.list(attrs.string(), default = []), - "serialize_debugging_options": attrs.bool(default = False), "soname": attrs.option(attrs.string(), default = None), "static_library_basename": attrs.option(attrs.string(), default = None), "supported_platforms_regex": attrs.option(attrs.regex(), default = None), "supports_merged_linking": attrs.option(attrs.bool(), default = None), "swift_compiler_flags": attrs.list(attrs.arg(), default = []), + "swift_module_skip_function_bodies": attrs.bool(default = True), "swift_version": attrs.option(attrs.string(), default = None), "thin_lto": attrs.bool(default = False), - "use_submodules": attrs.bool(default = False), + "use_submodules": attrs.bool(default = True), "uses_cxx_explicit_modules": attrs.bool(default = False), - "uses_explicit_modules": attrs.bool(default = False), "uses_modules": attrs.bool(default = False), - "xcode_private_headers_symlinks": attrs.option(attrs.bool(), default = None), - "xcode_public_headers_symlinks": attrs.option(attrs.bool(), default = None), } | buck.allow_cache_upload_arg() ), @@ -370,7 +378,7 @@ apple_bundle = prelude_rule( "licenses": attrs.list(attrs.source(), default = []), "platform_binary": attrs.option(attrs.list(attrs.tuple(attrs.regex(), attrs.dep())), default = None), "resource_group": attrs.option(attrs.string(), default = None), - "resource_group_map": attrs.option(attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal), attrs.option(attrs.string()))))), default = None), + "resource_group_map": attrs.option(RESOURCE_GROUP_MAP_ATTR, default = None), "skip_copying_swift_stdlib": attrs.option(attrs.bool(), default = None), "try_skip_code_signing": attrs.option(attrs.bool(), default = None), "xcode_product_type": attrs.option(attrs.string(), default = None), @@ -441,17 +449,19 @@ apple_library = prelude_rule( cxx_common.exported_linker_flags_arg() | cxx_common.exported_platform_linker_flags_arg() | apple_common.target_sdk_version() | - native_common.preferred_linkage(preferred_linkage_type = attrs.option(attrs.enum(Linkage), default = None)) | + native_common.preferred_linkage(preferred_linkage_type = attrs.option(attrs.enum(Linkage.values()), default = None)) | native_common.link_style() | native_common.link_whole(link_whole_type = attrs.option(attrs.bool(), default = None)) | cxx_common.reexport_all_header_dependencies_arg() | cxx_common.exported_deps_arg() | apple_common.extra_xcode_sources() | apple_common.extra_xcode_files() | + apple_common.serialize_debugging_options_arg() | + apple_common.uses_explicit_modules_arg() | + apple_common.meta_apple_library_validation_enabled_arg() | { "bridging_header": attrs.option(attrs.source(), default = None), "can_be_asset": attrs.option(attrs.bool(), default = None), - "configs": attrs.dict(key = attrs.string(), value = attrs.dict(key = attrs.string(), value = attrs.string(), sorted = False), sorted = False, default = {}), "contacts": attrs.list(attrs.string(), default = []), "cxx_runtime_type": attrs.option(attrs.enum(CxxRuntimeType), default = None), "default_host_platform": attrs.option(attrs.configuration_label(), default = None), @@ -486,7 +496,7 @@ apple_library = prelude_rule( "libraries": attrs.list(attrs.string(), default = []), "licenses": attrs.list(attrs.source(), default = []), "link_group": attrs.option(attrs.string(), default = None), - "link_group_map": attrs.option(attrs.list(attrs.tuple(attrs.string(), 
attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal), attrs.option(attrs.string()))))), default = None), + "link_group_map": LINK_GROUP_MAP_ATTR, "modular": attrs.bool(default = False), "module_name": attrs.option(attrs.string(), default = None), "module_requires_cxx": attrs.bool(default = False), @@ -498,24 +508,22 @@ apple_library = prelude_rule( "post_platform_linker_flags": attrs.list(attrs.tuple(attrs.regex(), attrs.list(attrs.arg())), default = []), "precompiled_header": attrs.option(attrs.source(), default = None), "prefix_header": attrs.option(attrs.source(), default = None), + "public_framework_headers": attrs.named_set(attrs.source(), sorted = True, default = []), "public_include_directories": attrs.set(attrs.string(), sorted = True, default = []), "public_system_include_directories": attrs.set(attrs.string(), sorted = True, default = []), "raw_headers": attrs.set(attrs.source(), sorted = True, default = []), "sdk_modules": attrs.list(attrs.string(), default = []), - "serialize_debugging_options": attrs.bool(default = False), "soname": attrs.option(attrs.string(), default = None), "static_library_basename": attrs.option(attrs.string(), default = None), "supported_platforms_regex": attrs.option(attrs.regex(), default = None), "supports_merged_linking": attrs.option(attrs.bool(), default = None), "swift_compiler_flags": attrs.list(attrs.arg(), default = []), + "swift_module_skip_function_bodies": attrs.bool(default = True), "swift_version": attrs.option(attrs.string(), default = None), "thin_lto": attrs.bool(default = False), - "use_submodules": attrs.bool(default = False), + "use_submodules": attrs.bool(default = True), "uses_cxx_explicit_modules": attrs.bool(default = False), - "uses_explicit_modules": attrs.bool(default = False), "uses_modules": attrs.bool(default = False), - "xcode_private_headers_symlinks": attrs.option(attrs.bool(), default = None), - "xcode_public_headers_symlinks": attrs.option(attrs.bool(), default = None), } | buck.allow_cache_upload_arg() ), @@ -667,7 +675,7 @@ apple_test = prelude_rule( apple_common.info_plist_arg() | apple_common.info_plist_substitutions_arg() | { - "test_host_app": attrs.option(attrs.dep(), default = None, doc = """ + "test_host_app": attrs.option(attrs.transition_dep(cfg = apple_test_host_app_transition), default = None, doc = """ A build target identifying an `apple_bundle()` rule that builds an application bundle. Output of the specified rule will be used as a test host of this test. This @@ -678,6 +686,9 @@ apple_test = prelude_rule( reference errors during compilation, but if the symbols do not exist, it might result in runtime crashes). """), + "embed_xctest_frameworks_in_test_host_app": attrs.option(attrs.bool(), default = None, doc = """ + Controls whether a marker constraint is added to the `test_host_app`. 
+ """), } | cxx_common.srcs_arg() | cxx_common.platform_srcs_arg() | @@ -688,19 +699,19 @@ apple_test = prelude_rule( cxx_common.compiler_flags_arg() | cxx_common.platform_compiler_flags_arg() | cxx_common.linker_flags_arg() | - native_common.link_style() | apple_common.target_sdk_version() | buck.run_test_separately_arg(run_test_separately_type = attrs.bool(default = False)) | buck.test_label_arg() | apple_common.extra_xcode_sources() | apple_common.extra_xcode_files() | + apple_common.serialize_debugging_options_arg() | + apple_common.uses_explicit_modules_arg() | { "asset_catalogs_compilation_options": attrs.dict(key = attrs.string(), value = attrs.any(), default = {}), "bridging_header": attrs.option(attrs.source(), default = None), "can_be_asset": attrs.option(attrs.bool(), default = None), "codesign_flags": attrs.list(attrs.string(), default = []), "codesign_identity": attrs.option(attrs.string(), default = None), - "configs": attrs.dict(key = attrs.string(), value = attrs.dict(key = attrs.string(), value = attrs.string(), sorted = False), sorted = False, default = {}), "contacts": attrs.list(attrs.string(), default = []), "cxx_runtime_type": attrs.option(attrs.enum(CxxRuntimeType), default = None), "default_host_platform": attrs.option(attrs.configuration_label(), default = None), @@ -743,7 +754,10 @@ apple_test = prelude_rule( "libraries": attrs.list(attrs.string(), default = []), "licenses": attrs.list(attrs.source(), default = []), "link_group": attrs.option(attrs.string(), default = None), - "link_group_map": attrs.option(attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal), attrs.option(attrs.string()))))), default = None), + "link_group_map": LINK_GROUP_MAP_ATTR, + # Used to create the shared test library. Any library deps whose `preferred_linkage` isn't "shared" will + # be treated as "static" deps and linked into the shared test library. + "link_style": attrs.enum(LinkableDepType, default = "static"), "link_whole": attrs.option(attrs.bool(), default = None), "linker_extra_outputs": attrs.list(attrs.string(), default = []), "modular": attrs.bool(default = False), @@ -755,8 +769,8 @@ apple_test = prelude_rule( "platform_preprocessor_flags": attrs.list(attrs.tuple(attrs.regex(), attrs.list(attrs.arg())), default = []), "post_linker_flags": attrs.list(attrs.arg(), default = []), "post_platform_linker_flags": attrs.list(attrs.tuple(attrs.regex(), attrs.list(attrs.arg())), default = []), - "precompiled_header": attrs.option(attrs.source(), default = None), - "preferred_linkage": attrs.option(attrs.enum(Linkage), default = None), + # The test source code and lib dependencies should be built into a shared library. 
+ "preferred_linkage": attrs.enum(Linkage.values(), default = "shared"), "prefix_header": attrs.option(attrs.source(), default = None), "public_include_directories": attrs.set(attrs.string(), sorted = True, default = []), "public_system_include_directories": attrs.set(attrs.string(), sorted = True, default = []), @@ -764,7 +778,6 @@ apple_test = prelude_rule( "reexport_all_header_dependencies": attrs.option(attrs.bool(), default = None), "runner": attrs.option(attrs.dep(), default = None), "sdk_modules": attrs.list(attrs.string(), default = []), - "serialize_debugging_options": attrs.bool(default = False), "skip_copying_swift_stdlib": attrs.option(attrs.bool(), default = None), "snapshot_reference_images_path": attrs.option(attrs.one_of(attrs.source(), attrs.string()), default = None), "soname": attrs.option(attrs.string(), default = None), @@ -773,20 +786,20 @@ apple_test = prelude_rule( "supported_platforms_regex": attrs.option(attrs.regex(), default = None), "supports_merged_linking": attrs.option(attrs.bool(), default = None), "swift_compiler_flags": attrs.list(attrs.arg(), default = []), + "swift_module_skip_function_bodies": attrs.bool(default = True), "swift_version": attrs.option(attrs.string(), default = None), "test_rule_timeout_ms": attrs.option(attrs.int(), default = None), "thin_lto": attrs.bool(default = False), "try_skip_code_signing": attrs.option(attrs.bool(), default = None), "ui_test_target_app": attrs.option(attrs.dep(), default = None), - "use_submodules": attrs.bool(default = False), + "use_submodules": attrs.bool(default = True), "uses_cxx_explicit_modules": attrs.bool(default = False), - "uses_explicit_modules": attrs.bool(default = False), "uses_modules": attrs.bool(default = False), - "xcode_private_headers_symlinks": attrs.option(attrs.bool(), default = None), "xcode_product_type": attrs.option(attrs.string(), default = None), - "xcode_public_headers_symlinks": attrs.option(attrs.bool(), default = None), } | - buck.allow_cache_upload_arg() + buck.allow_cache_upload_arg() | + buck.inject_test_env_arg() | + apple_test_extra_attrs() ), ) @@ -815,6 +828,7 @@ apple_toolchain = prelude_rule( "libtool": attrs.source(), "licenses": attrs.list(attrs.source(), default = []), "lipo": attrs.source(), + "mapc": attrs.option(attrs.source(), default = None), "min_version": attrs.string(default = ""), "momc": attrs.source(), "platform_path": attrs.source(), @@ -905,7 +919,7 @@ prebuilt_apple_framework = prelude_rule( attrs = ( # @unsorted-dict-items { - "preferred_linkage": attrs.enum(Linkage, doc = """ + "preferred_linkage": attrs.enum(Linkage.values(), doc = """ How to link to a binary: use `dynamic` for a dynamic framework, and `static` for old universal static frameworks manually lipo-ed together. `dynamic` will @@ -914,7 +928,6 @@ prebuilt_apple_framework = prelude_rule( `static` will copy the resources of the framework into an Apple bundle. 
"""), - "code_sign_on_copy": attrs.option(attrs.bool(), default = None), "contacts": attrs.list(attrs.string(), default = []), "default_host_platform": attrs.option(attrs.configuration_label(), default = None), "deps": attrs.list(attrs.dep(), default = []), @@ -954,6 +967,7 @@ swift_library = prelude_rule( further = None, attrs = ( # @unsorted-dict-items + apple_common.serialize_debugging_options_arg() | { "bridging_header": attrs.option(attrs.source(), default = None), "compiler_flags": attrs.list(attrs.arg(), default = []), @@ -967,16 +981,15 @@ swift_library = prelude_rule( "libraries": attrs.list(attrs.string(), default = []), "licenses": attrs.list(attrs.source(), default = []), "module_name": attrs.option(attrs.string(), default = None), - "preferred_linkage": attrs.option(attrs.enum(Linkage), default = None), + "preferred_linkage": attrs.option(attrs.enum(Linkage.values()), default = None), "sdk_modules": attrs.list(attrs.string(), default = []), - "serialize_debugging_options": attrs.bool(default = False), "soname": attrs.option(attrs.string(), default = None), "srcs": attrs.list(attrs.source(), default = []), "supported_platforms_regex": attrs.option(attrs.regex(), default = None), "target_sdk_version": attrs.option(attrs.string(), default = None), - "uses_explicit_modules": attrs.bool(default = False), "version": attrs.option(attrs.string(), default = None), - } + } | + apple_common.uses_explicit_modules_arg() ), ) @@ -1000,7 +1013,6 @@ swift_toolchain = prelude_rule( "runtime_paths_for_bundling": attrs.list(attrs.string(), default = []), "runtime_paths_for_linking": attrs.list(attrs.string(), default = []), "runtime_run_paths": attrs.list(attrs.string(), default = []), - "sdk_dependencies_path": attrs.option(attrs.string(), default = None), "sdk_path": attrs.source(), "static_runtime_paths": attrs.list(attrs.string(), default = []), "supports_relative_resource_dir": attrs.bool(default = False), @@ -1015,79 +1027,112 @@ swift_toolchain = prelude_rule( ), ) -xcode_postbuild_script = prelude_rule( - name = "xcode_postbuild_script", - docs = "", - examples = None, - further = None, - attrs = ( - # @unsorted-dict-items - { - "cmd": attrs.string(default = ""), - "contacts": attrs.list(attrs.string(), default = []), - "default_host_platform": attrs.option(attrs.configuration_label(), default = None), - "input_file_lists": attrs.list(attrs.string(), default = []), - "inputs": attrs.list(attrs.string(), default = []), - "labels": attrs.list(attrs.string(), default = []), - "licenses": attrs.list(attrs.source(), default = []), - "output_file_lists": attrs.list(attrs.string(), default = []), - "outputs": attrs.list(attrs.string(), default = []), - "srcs": attrs.list(attrs.source(), default = []), - } - ), -) +_APPLE_TOOLCHAIN_ATTR = get_apple_toolchain_attr() -xcode_prebuild_script = prelude_rule( - name = "xcode_prebuild_script", - docs = "", +def _apple_universal_executable_attrs(): + attribs = { + "executable": attrs.split_transition_dep(cfg = cpu_split_transition, doc = """ + A build target identifying the binary which will be built for multiple architectures. + The target will be transitioned into different configurations, with distinct architectures. 
+
+            The target can be one of:
+            - `apple_binary()` and `cxx_binary()`
+            - `[shared]` subtarget of `apple_library()` and `cxx_library()`
+            - `apple_library()` and `cxx_library()` which have the `preferred_linkage = shared` attribute set
+        """),
+        "executable_name": attrs.option(attrs.string(), default = None, doc = """
+            By default, the name of the universal executable is the same as the name of the binary
+            from the `executable` target attribute. Set `executable_name` to override the default.
+        """),
+        "labels": attrs.list(attrs.string(), default = []),
+        "split_arch_dsym": attrs.bool(default = False, doc = """
+            If enabled, each architecture gets its own dSYM binary. Use this if the combined
+            universal dSYM binary exceeds 4GiB.
+        """),
+        "universal": attrs.option(attrs.bool(), default = None, doc = """
+            Controls whether the output is a universal binary. Any value overrides the presence
+            of the `config//cpu/constraints:universal-enabled` constraint. Read the rule docs
+            for more information on resolution.
+        """),
+        "_apple_toolchain": _APPLE_TOOLCHAIN_ATTR,
+        "_apple_tools": attrs.exec_dep(default = "prelude//apple/tools:apple-tools", providers = [AppleToolsInfo]),
+    }
+    attribs.update(apple_dsymutil_attrs())
+    return attribs
+
+apple_universal_executable = prelude_rule(
+    name = "apple_universal_executable",
+    impl = apple_universal_executable_impl,
+    docs = """
+        An `apple_universal_executable()` rule takes a target via its
+        `executable` attribute, builds it for multiple architectures, and
+        combines the result into a single binary using `lipo`.
+
+        The output of the rule is a universal binary if either:
+        - `config//cpu/constraints:universal-enabled` is present in the target platform.
+        - The `universal` attribute is set to `True`.
+
+        If neither condition is met, the rule acts as a no-op `alias()`.
+
+        The `universal` attribute, if present, takes precedence over the constraint.
+        For example, if `universal = False`, then the presence of the constraint
+        would not affect the output.
+
+        `apple_bundle()` already supports building universal binaries;
+        `apple_universal_executable()` is only needed if you have a standalone
+        binary target which is not embedded in an `apple_bundle()` (usually a
+        CLI tool).
+    """,
     examples = None,
     further = None,
-    attrs = (
-        # @unsorted-dict-items
-        {
-            "cmd": attrs.string(default = ""),
-            "contacts": attrs.list(attrs.string(), default = []),
-            "default_host_platform": attrs.option(attrs.configuration_label(), default = None),
-            "input_file_lists": attrs.list(attrs.string(), default = []),
-            "inputs": attrs.list(attrs.string(), default = []),
-            "labels": attrs.list(attrs.string(), default = []),
-            "licenses": attrs.list(attrs.source(), default = []),
-            "output_file_lists": attrs.list(attrs.string(), default = []),
-            "outputs": attrs.list(attrs.string(), default = []),
-            "srcs": attrs.list(attrs.source(), default = []),
-        }
-    ),
+    attrs = _apple_universal_executable_attrs(),
 )

-xcode_workspace_config = prelude_rule(
-    name = "xcode_workspace_config",
-    docs = "",
+def _cxx_universal_executable_attrs():
+    return {
+        "executable": attrs.split_transition_dep(cfg = cpu_split_transition, doc = """
+            A build target identifying the binary which will be built for multiple architectures.
+            The target will be transitioned into different configurations, with distinct architectures.
+
+            The target can be one of:
+            - `cxx_binary()`
+            - `[shared]` subtarget of `cxx_library()`
+            - `cxx_library()` which has the `preferred_linkage = shared` attribute set
+        """),
+        "executable_name": attrs.option(attrs.string(), default = None, doc = """
+            By default, the name of the universal executable is the same as the name of the binary
+            from the `executable` target attribute. Set `executable_name` to override the default.
+        """),
+        "labels": attrs.list(attrs.string(), default = []),
+        "universal": attrs.option(attrs.bool(), default = None, doc = """
+            Controls whether the output is a universal binary. Any value overrides the presence
+            of the `config//cpu/constraints:universal-enabled` constraint. Read the rule docs
+            for more information on resolution.
+        """),
+        "_cxx_toolchain": toolchains_common.cxx(),
+    }
+
+cxx_universal_executable = prelude_rule(
+    name = "cxx_universal_executable",
+    impl = cxx_universal_executable_impl,
+    docs = """
+        A `cxx_universal_executable()` rule takes a target via its
+        `executable` attribute, builds it for multiple architectures, and
+        combines the result into a single binary using `lipo`.
+
+        The output of the rule is a universal binary if either:
+        - `config//cpu/constraints:universal-enabled` is present in the target platform.
+        - The `universal` attribute is set to `True`.
+
+        If neither condition is met, the rule acts as a no-op `alias()`.
+
+        The `universal` attribute, if present, takes precedence over the constraint.
+        For example, if `universal = False`, then the presence of the constraint
+        would not affect the output.
+    """,
     examples = None,
     further = None,
-    attrs = (
-        # @unsorted-dict-items
-        {
-            "action_config_names": attrs.dict(key = attrs.enum(SchemeActionType), value = attrs.string(), sorted = False, default = {}),
-            "additional_scheme_actions": attrs.option(attrs.dict(key = attrs.enum(SchemeActionType), value = attrs.dict(key = attrs.enum(AdditionalActions), value = attrs.list(attrs.string()), sorted = False), sorted = False), default = None),
-            "contacts": attrs.list(attrs.string(), default = []),
-            "default_host_platform": attrs.option(attrs.configuration_label(), default = None),
-            "environment_variables": attrs.option(attrs.dict(key = attrs.enum(SchemeActionType), value = attrs.dict(key = attrs.string(), value = attrs.string(), sorted = False), sorted = False), default = None),
-            "explicit_runnable_path": attrs.option(attrs.string(), default = None),
-            "extra_schemes": attrs.dict(key = attrs.string(), value = attrs.dep(), sorted = False, default = {}),
-            "extra_shallow_targets": attrs.list(attrs.dep(), default = []),
-            "extra_targets": attrs.list(attrs.dep(), default = []),
-            "extra_tests": attrs.list(attrs.dep(), default = []),
-            "is_remote_runnable": attrs.option(attrs.bool(), default = None),
-            "labels": attrs.list(attrs.string(), default = []),
-            "launch_style": attrs.option(attrs.enum(LaunchStyle), default = None),
-            "licenses": attrs.list(attrs.source(), default = []),
-            "notification_payload_file": attrs.option(attrs.string(), default = None),
-            "src_target": attrs.option(attrs.dep(), default = None),
-            "was_created_for_app_extension": attrs.option(attrs.bool(), default = None),
-            "watch_interface": attrs.option(attrs.enum(WatchInterface), default = None),
-            "workspace_name": attrs.option(attrs.string(), default = None),
-        }
-    ),
+    attrs = _cxx_universal_executable_attrs(),
 )

 ios_rules = struct(
@@ -1100,12 +1145,11 @@
     apple_test = apple_test,
     apple_toolchain = apple_toolchain,
     apple_toolchain_set = apple_toolchain_set,
+
apple_universal_executable = apple_universal_executable, core_data_model = core_data_model, + cxx_universal_executable = cxx_universal_executable, prebuilt_apple_framework = prebuilt_apple_framework, scene_kit_assets = scene_kit_assets, swift_library = swift_library, swift_toolchain = swift_toolchain, - xcode_postbuild_script = xcode_postbuild_script, - xcode_prebuild_script = xcode_prebuild_script, - xcode_workspace_config = xcode_workspace_config, ) diff --git a/prelude/decls/java_rules.bzl b/prelude/decls/java_rules.bzl index 53f03a47420..1eaab693625 100644 --- a/prelude/decls/java_rules.bzl +++ b/prelude/decls/java_rules.bzl @@ -59,6 +59,7 @@ jar_genrule = prelude_rule( "enable_sandbox": attrs.option(attrs.bool(), default = None), "environment_expansion_separator": attrs.option(attrs.string(), default = None), "labels": attrs.list(attrs.string(), default = []), + "weight": attrs.option(attrs.int(), default = None), "licenses": attrs.list(attrs.source(), default = []), "need_android_tools": attrs.bool(default = False), "remote": attrs.option(attrs.bool(), default = None), @@ -231,7 +232,7 @@ java_library = prelude_rule( If any of the files in this list end in `.src.zip`, then the entries in the ZIP file that end in `.java` will be included as ordinary inputs to compilation. This is common when using - a `genrule()`to auto-generate some Java source code that + a `genrule()` to auto-generate some Java source code that needs to be compiled with some hand-written Java code. """), } | @@ -256,12 +257,6 @@ java_library = prelude_rule( "java_version": attrs.option(attrs.string(), default = None, doc = """ Equivalent to setting both `source` and `target` to the given value. Setting this and `source` or `target` (or both!) is an error. """), - "javac": attrs.option(attrs.source(), default = None, doc = """ - Specifies the Java compiler program to use for this rule. - The value is a source path (e.g., //foo/bar:bar). - Overrides the value in "javac" in the "tools" section - of `.buckconfig`. - """), "extra_arguments": attrs.list(attrs.string(), default = [], doc = """ List of additional arguments to pass into the Java compiler. These arguments follow the ones specified in `.buckconfig`. @@ -275,6 +270,8 @@ java_library = prelude_rule( jvm_common.source_only_abi_deps() | jvm_common.required_for_source_only_abi() | jvm_common.on_unused_dependencies() | + jvm_common.plugins() | + jvm_common.javac() | { "annotation_processor_deps": attrs.list(attrs.dep(), default = []), "annotation_processor_params": attrs.list(attrs.string(), default = []), @@ -287,11 +284,9 @@ java_library = prelude_rule( "manifest_file": attrs.option(attrs.source(), default = None), "maven_coords": attrs.option(attrs.string(), default = None), "never_mark_as_unused_dependency": attrs.option(attrs.bool(), default = None), - "plugins": attrs.list(attrs.dep(), default = []), "proguard_config": attrs.option(attrs.source(), default = None), "runtime_deps": attrs.list(attrs.dep(), default = []), "source_abi_verification_mode": attrs.option(attrs.enum(SourceAbiVerificationMode), default = None), - "_wip_java_plugin_arguments": attrs.dict(attrs.label(), attrs.list(attrs.string()), default = {}), } ), ) @@ -327,6 +322,7 @@ java_test = prelude_rule( further = None, attrs = ( # @unsorted-dict-items + buck.inject_test_env_arg() | { "srcs": attrs.list(attrs.source(), default = [], doc = """ Like `java_library()`, @@ -379,7 +375,7 @@ java_test = prelude_rule( Same as `std_out_log_level`, but for std err. 
"""), "use_cxx_libraries": attrs.option(attrs.bool(), default = None, doc = """ - Whether or not to build and link against `cxx\\_library()`dependencies when testing. + Whether or not to build and link against `cxx_library()` dependencies when testing. """), "cxx_library_whitelist": attrs.list(attrs.dep(), default = [], doc = """ EXPERIMENTAL. @@ -403,15 +399,14 @@ java_test = prelude_rule( "exported_deps": attrs.list(attrs.dep(), default = []), "exported_provided_deps": attrs.list(attrs.dep(), default = []), "extra_arguments": attrs.list(attrs.string(), default = []), + "jar_postprocessor": attrs.option(attrs.exec_dep(), default = None), "java_version": attrs.option(attrs.string(), default = None), "java": attrs.option(attrs.dep(), default = None), - "javac": attrs.option(attrs.source(), default = None), "licenses": attrs.list(attrs.source(), default = []), "manifest_file": attrs.option(attrs.source(), default = None), "maven_coords": attrs.option(attrs.string(), default = None), "never_mark_as_unused_dependency": attrs.option(attrs.bool(), default = None), "on_unused_dependencies": attrs.option(attrs.enum(UnusedDependenciesAction), default = None), - "plugins": attrs.list(attrs.dep(), default = []), "proguard_config": attrs.option(attrs.source(), default = None), "provided_deps": attrs.list(attrs.dep(), default = []), "remove_classes": attrs.list(attrs.regex(), default = []), @@ -425,9 +420,8 @@ java_test = prelude_rule( "test_case_timeout_ms": attrs.option(attrs.int(), default = None), "unbundled_resources_root": attrs.option(attrs.source(allow_directory = True), default = None), "use_dependency_order_classpath": attrs.option(attrs.bool(), default = None), - "_wip_java_plugin_arguments": attrs.dict(attrs.label(), attrs.list(attrs.string()), default = {}), } - ), + ) | jvm_common.plugins() | jvm_common.javac(), ) java_test_runner = prelude_rule( @@ -449,7 +443,6 @@ java_test_runner = prelude_rule( "exported_provided_deps": attrs.list(attrs.dep(), default = []), "extra_arguments": attrs.list(attrs.string(), default = []), "java_version": attrs.option(attrs.string(), default = None), - "javac": attrs.option(attrs.source(), default = None), "labels": attrs.list(attrs.string(), default = []), "licenses": attrs.list(attrs.source(), default = []), "main_class": attrs.string(default = ""), @@ -457,7 +450,6 @@ java_test_runner = prelude_rule( "maven_coords": attrs.option(attrs.string(), default = None), "never_mark_as_unused_dependency": attrs.option(attrs.bool(), default = None), "on_unused_dependencies": attrs.option(attrs.enum(UnusedDependenciesAction), default = None), - "plugins": attrs.list(attrs.dep(), default = []), "proguard_config": attrs.option(attrs.source(), default = None), "provided_deps": attrs.list(attrs.dep(), default = []), "remove_classes": attrs.list(attrs.regex(), default = []), @@ -470,8 +462,7 @@ java_test_runner = prelude_rule( "source_only_abi_deps": attrs.list(attrs.dep(), default = []), "srcs": attrs.list(attrs.source(), default = []), "target": attrs.option(attrs.string(), default = None), - "_wip_java_plugin_arguments": attrs.dict(attrs.label(), attrs.list(attrs.string()), default = {}), - } + } | jvm_common.plugins() | jvm_common.javac() ), ) diff --git a/prelude/decls/js_rules.bzl b/prelude/decls/js_rules.bzl index bcc05534232..b820c8c1109 100644 --- a/prelude/decls/js_rules.bzl +++ b/prelude/decls/js_rules.bzl @@ -64,6 +64,7 @@ js_bundle_genrule = prelude_rule( "rewrite_sourcemap": attrs.bool(default = False), "skip_resources": attrs.bool(default = False), 
"srcs": attrs.named_set(attrs.source(), sorted = False, default = []), + "weight": attrs.option(attrs.int(), default = None), } ), ) diff --git a/prelude/decls/jvm_common.bzl b/prelude/decls/jvm_common.bzl index 86a674b21c1..3a9e0831fe9 100644 --- a/prelude/decls/jvm_common.bzl +++ b/prelude/decls/jvm_common.bzl @@ -154,6 +154,97 @@ def _k2(): """), } +def _incremental(): + return { + "incremental": attrs.bool(default = False, doc = """ + Enables Kotlin incremental compilation. + """), + } + +def _plugins(): + return { + "plugins": attrs.list( + attrs.one_of( + attrs.dep(), + attrs.tuple(attrs.dep(), attrs.list(attrs.string())), + ), + default = [], + ), + } + +def _kotlin_compiler_plugins(): + return { + "kotlin_compiler_plugins": attrs.dict(key = attrs.source(), value = attrs.dict(key = attrs.string(), value = attrs.string(), sorted = False), sorted = False, default = {}, doc = """ + Use this to specify [Kotlin compiler plugins](https://kotlinlang.org/docs/reference/compiler-plugins.html) to use when compiling this library. + This takes a map, with each entry specify one plugin. Entry's key is plugin source path, + and value is a map of plugin option key value pair. Unlike `extra_kotlinc_arguments`, + these can be *source paths*, not just strings. + + A special option value is + `__codegen_dir__`, in which case Buck will provide a default codegen folder's path as + option value instead. + E.g. + + ``` +fbcode/buck2/prelude/decls/jvm_common.bzl + kotlin_compiler_plugins = { + "somePluginSourcePath": { + "plugin:somePluginId:somePluginOptionKey": "somePluginOptionValue", + "plugin:somePluginId:someDirectoryRelatedOptionKey": "__codegen_dir__", + }, + }, + + ``` + Each plugin source path will be prefixed with `-Xplugin=` and passed as extra + arguments to the compiler. Plugin options will be appended after its plugin with `-P`. + + A specific example is, if you want to use [kotlinx.serialization](https://github.com/Kotlin/kotlinx.serialization) + with `kotlin_library()`, you need to specify `kotlinx-serialization-compiler-plugin.jar` under `kotlin_compiler_plugins` and `kotlinx-serialization-runtime.jar` (which you may have to fetch from Maven) in your `deps`: + + ``` + + kotlin_library( + name = "example", + srcs = glob(["*.kt"]), + deps = [ + ":kotlinx-serialization-runtime", + ], + kotlin_compiler_plugins = { + # Likely copied from your $KOTLIN_HOME directory. + "kotlinx-serialization-compiler-plugin.jar": {}, + }, + ) + + prebuilt_jar( + name = "kotlinx-serialization-runtime", + binary_jar = ":kotlinx-serialization-runtime-0.10.0", + ) + + # Note you probably want to set + # maven_repo=http://jcenter.bintray.com/ in your .buckconfig until + # https://github.com/Kotlin/kotlinx.serialization/issues/64 + # is closed. + remote_file( + name = "kotlinx-serialization-runtime-0.10.0", + out = "kotlinx-serialization-runtime-0.10.0.jar", + url = "mvn:org.jetbrains.kotlinx:kotlinx-serialization-runtime:jar:0.10.0", + sha1 = "23d777a5282c1957c7ce35946374fff0adab114c" + ) + + ``` + """), + } + +def _javac(): + return { + "javac": attrs.option(attrs.one_of(attrs.exec_dep(), attrs.source()), default = None, doc = """ + Specifies the Java compiler program to use for this rule. + The value is a source path or an execution dep (e.g., //foo/bar:bar). + Overrides the value in "javac" in the "tools" section + of `.buckconfig`. 
+ """), + } + jvm_common = struct( test_env = _test_env, resources_arg = _resources_arg, @@ -166,4 +257,8 @@ jvm_common = struct( required_for_source_only_abi = _required_for_source_only_abi, on_unused_dependencies = _on_unused_dependencies, k2 = _k2, + incremental = _incremental, + plugins = _plugins, + kotlin_compiler_plugins = _kotlin_compiler_plugins, + javac = _javac, ) diff --git a/prelude/decls/kotlin_rules.bzl b/prelude/decls/kotlin_rules.bzl index ddd4d92a878..da1a8e00184 100644 --- a/prelude/decls/kotlin_rules.bzl +++ b/prelude/decls/kotlin_rules.bzl @@ -90,65 +90,6 @@ kotlin_library = prelude_rule( Rules (usually other `kotlin_library` rules) that are used to generate the classpath required to compile this `kotlin_library`. """), - "kotlin_compiler_plugins": attrs.dict(key = attrs.source(), value = attrs.dict(key = attrs.string(), value = attrs.string(), sorted = False), sorted = False, default = {}, doc = """ - Use this to specify [Kotlin compiler plugins](https://kotlinlang.org/docs/reference/compiler-plugins.html) to use when compiling this library. - This takes a map, with each entry specify one plugin. Entry's key is plugin source path, - and value is a map of plugin option key value pair. Unlike `extra_kotlinc_arguments`, - these can be *source paths*, not just strings. - - A special option value is - `__codegen_dir__`, in which case Buck will provide a default codegen folder's path as - option value instead. - E.g. - - ``` - - kotlin_compiler_plugins = { - "somePluginSourcePath": { - "plugin:somePluginId:somePluginOptionKey": "somePluginOptionValue", - "plugin:somePluginId:someDirectoryRelatedOptionKey": "__codegen_dir__", - }, - }, - - ``` - Each plugin source path will be prefixed with `-Xplugin=` and passed as extra - arguments to the compiler. Plugin options will be appended after its plugin with `-P`. - - A specific example is, if you want to use [kotlinx.serialization](https://github.com/Kotlin/kotlinx.serialization) - with `kotlin_library()`, you need to specify `kotlinx-serialization-compiler-plugin.jar` under `kotlin_compiler_plugins` and `kotlinx-serialization-runtime.jar` (which you may have to fetch from Maven) in your `deps`: - - ``` - - kotlin_library( - name = "example", - srcs = glob(["*.kt"]), - deps = [ - ":kotlinx-serialization-runtime", - ], - kotlin_compiler_plugins = { - # Likely copied from your $KOTLIN_HOME directory. - "kotlinx-serialization-compiler-plugin.jar": {}, - }, - ) - - prebuilt_jar( - name = "kotlinx-serialization-runtime", - binary_jar = ":kotlinx-serialization-runtime-0.10.0", - ) - - # Note you probably want to set - # maven_repo=http://jcenter.bintray.com/ in your .buckconfig until - # https://github.com/Kotlin/kotlinx.serialization/issues/64 - # is closed. - remote_file( - name = "kotlinx-serialization-runtime-0.10.0", - out = "kotlinx-serialization-runtime-0.10.0.jar", - url = "mvn:org.jetbrains.kotlinx:kotlinx-serialization-runtime:jar:0.10.0", - sha1 = "23d777a5282c1957c7ce35946374fff0adab114c" - ) - - ``` - """), "extra_kotlinc_arguments": attrs.list(attrs.string(), default = [], doc = """ List of additional arguments to pass into the Kotlin compiler. 
"""), @@ -169,7 +110,10 @@ kotlin_library = prelude_rule( jvm_common.provided_deps() | jvm_common.exported_provided_deps() | jvm_common.k2() | - buck.labels_arg() | + jvm_common.kotlin_compiler_plugins() | + jvm_common.incremental() | + jvm_common.plugins() | + jvm_common.javac() | buck.labels_arg() | { "abi_generation_mode": attrs.option(attrs.enum(AbiGenerationMode), default = None), "annotation_processor_deps": attrs.list(attrs.dep(), default = []), @@ -178,16 +122,13 @@ kotlin_library = prelude_rule( "contacts": attrs.list(attrs.string(), default = []), "default_host_platform": attrs.option(attrs.configuration_label(), default = None), "extra_arguments": attrs.list(attrs.string(), default = []), - "extra_non_source_only_abi_kotlinc_arguments": attrs.list(attrs.string(), default = []), "java_version": attrs.option(attrs.string(), default = None), - "javac": attrs.option(attrs.source(), default = None), "jar_postprocessor": attrs.option(attrs.exec_dep(), default = None), "licenses": attrs.list(attrs.source(), default = []), "manifest_file": attrs.option(attrs.source(), default = None), "maven_coords": attrs.option(attrs.string(), default = None), "never_mark_as_unused_dependency": attrs.option(attrs.bool(), default = None), "on_unused_dependencies": attrs.option(attrs.enum(UnusedDependenciesAction), default = None), - "plugins": attrs.list(attrs.dep(), default = []), "proguard_config": attrs.option(attrs.source(), default = None), "required_for_source_only_abi": attrs.bool(default = False), "runtime_deps": attrs.list(attrs.dep(), default = []), @@ -196,8 +137,7 @@ kotlin_library = prelude_rule( "source_only_abi_deps": attrs.list(attrs.dep(), default = []), "target": attrs.option(attrs.string(), default = None), "use_jvm_abi_gen": attrs.option(attrs.bool(), default = None), - "_wip_java_plugin_arguments": attrs.dict(attrs.label(), attrs.list(attrs.string()), default = {}), - } + } | jvm_common.plugins() ), ) @@ -211,6 +151,7 @@ kotlin_test = prelude_rule( further = None, attrs = ( # @unsorted-dict-items + buck.inject_test_env_arg() | { "srcs": attrs.list(attrs.source(), default = [], doc = """ Like ``kotlin_library()``, @@ -225,13 +166,13 @@ kotlin_test = prelude_rule( `glob(['**/*Test.kt'])`. """), "resources": attrs.list(attrs.source(), default = [], doc = """ - Same as `kotlin\\_library()`. + Same as `kotlin_library()`. """), } | buck.test_label_arg() | { "deps": attrs.list(attrs.dep(), default = [], doc = """ - Same as `kotlin\\_library()`. + Same as `kotlin_library()`. // org.junit.rules.Timeout was not introduced until 4.7. Must include JUnit (version 4.7 or later) as a dependency for JUnit tests. Must include TestNG (version 6.2 or later) and hamcrest as a dependencies for TestNG tests. 
@@ -259,7 +200,10 @@ kotlin_test = prelude_rule( """), } | jvm_common.k2() | + jvm_common.kotlin_compiler_plugins() | + jvm_common.incremental() | jvm_common.test_env() | + jvm_common.javac() | { "abi_generation_mode": attrs.option(attrs.enum(AbiGenerationMode), default = None), "annotation_processing_tool": attrs.option(attrs.enum(AnnotationProcessingTool), default = None), @@ -275,12 +219,9 @@ kotlin_test = prelude_rule( "exported_provided_deps": attrs.list(attrs.dep(), default = []), "extra_arguments": attrs.list(attrs.string(), default = []), "extra_kotlinc_arguments": attrs.list(attrs.string(), default = []), - "extra_non_source_only_abi_kotlinc_arguments": attrs.list(attrs.string(), default = []), "friend_paths": attrs.list(attrs.dep(), default = []), "java_version": attrs.option(attrs.string(), default = None), "java": attrs.option(attrs.dep(), default = None), - "javac": attrs.option(attrs.source(), default = None), - "kotlin_compiler_plugins": attrs.dict(key = attrs.source(), value = attrs.dict(key = attrs.string(), value = attrs.string(), sorted = False), sorted = False, default = {}), "licenses": attrs.list(attrs.source(), default = []), "manifest_file": attrs.option(attrs.source(), default = None), "maven_coords": attrs.option(attrs.string(), default = None), @@ -302,7 +243,6 @@ kotlin_test = prelude_rule( "use_cxx_libraries": attrs.option(attrs.bool(), default = None), "use_dependency_order_classpath": attrs.option(attrs.bool(), default = None), "use_jvm_abi_gen": attrs.option(attrs.bool(), default = None), - "_wip_java_plugin_arguments": attrs.dict(attrs.label(), attrs.list(attrs.string()), default = {}), } ), ) diff --git a/prelude/decls/lua_rules.bzl b/prelude/decls/lua_rules.bzl index d46e9bf85a2..068745021ad 100644 --- a/prelude/decls/lua_rules.bzl +++ b/prelude/decls/lua_rules.bzl @@ -112,7 +112,7 @@ lua_binary = prelude_rule( name = "lua_binary", docs = """ A `lua_library()` rule is used to group together Lua sources - to be packaged into a top-level `lua\\_binary()`rule. + to be packaged into a top-level `lua_binary()` rule. """, examples = """ ``` @@ -140,7 +140,7 @@ lua_binary = prelude_rule( The module which serves as the entry point for this rule. """), "deps": attrs.list(attrs.dep(), default = [], doc = """ - `lua\\_library()`rules to this binary will access. + `lua_library()` rules to this binary will access. """), "contacts": attrs.list(attrs.string(), default = []), "default_host_platform": attrs.option(attrs.configuration_label(), default = None), @@ -159,7 +159,7 @@ lua_library = prelude_rule( name = "lua_library", docs = """ A `lua_library()` rule is used to group together Lua sources - to be packaged into a top-level `lua\\_binary()`rule. + to be packaged into a top-level `lua_binary()` rule. """, examples = """ ``` diff --git a/prelude/decls/native_common.bzl b/prelude/decls/native_common.bzl index 929c75602b1..acc501a9723 100644 --- a/prelude/decls/native_common.bzl +++ b/prelude/decls/native_common.bzl @@ -46,10 +46,22 @@ def _link_group_public_deps_label(): """), } +def _soname(): + return { + "soname": attrs.option(attrs.string(), default = None, doc = """ + Sets the soname ("shared object name") of any shared library produced from this rule. + The default value is based on the full rule name. + The macro `$(ext)` will be replaced with a platform-appropriate extension. + An argument can be provided, which is a library version. + For example `soname = 'libfoo.$(ext 2.3)'` will be `libfoo.2.3.dylib` on Mac and `libfoo.so.2.3` on Linux. 
+"""), + } + native_common = struct( link_group_deps = _link_group_deps, link_group_public_deps_label = _link_group_public_deps_label, link_style = _link_style, link_whole = _link_whole, preferred_linkage = _preferred_linkage, + soname = _soname, ) diff --git a/prelude/decls/python_common.bzl b/prelude/decls/python_common.bzl index c792a7aaafc..3a3b5b279b4 100644 --- a/prelude/decls/python_common.bzl +++ b/prelude/decls/python_common.bzl @@ -27,7 +27,7 @@ def _platform_srcs_arg(): regex against which the platform name is matched, and the second element is a list of source files. The regex should use `java.util.regex.Pattern` syntax. The platform name is a Python platform *flavor* defined in - the buckconfig#`python`section of `.buckconfig`. + the buckconfig#`python` section of `.buckconfig`. """), } @@ -49,7 +49,7 @@ def _platform_resources_arg(): regex against which the platform name is matched, and the second element is a list of resource files. The regex should use `java.util.regex.Pattern` syntax. The platform name is a Python platform *flavor* defined in - the buckconfig#`python`section of `.buckconfig`. + the buckconfig#`python `section of `.buckconfig`. """), } @@ -75,9 +75,9 @@ def _linker_flags_arg(): "linker_flags": attrs.list(attrs.arg(anon_target_compatible = True), default = [], doc = """ Additional linker flags that should be applied to any linking which is specific to this rule. Note that whether these flags are used is dependent on the native link strategy selected in - `.buckconfig` and currently applies only to the `merged` ``.buckconfig``; + `.buckconfig` and currently applies only to the merged `.buckconfig`; the `separate` link strategy pulls in shared libraries that are linked in the - context of the rules that own them, such as `cxx\\_library()`. + context of the rules that own them, such as `cxx_library()`. """), } diff --git a/prelude/decls/python_rules.bzl b/prelude/decls/python_rules.bzl index 53721a538b4..c1995734802 100644 --- a/prelude/decls/python_rules.bzl +++ b/prelude/decls/python_rules.bzl @@ -126,7 +126,7 @@ prebuilt_python_library = prelude_rule( name = "prebuilt_python_library", docs = """ A `prebuilt_python_library()` rule is used to include prebuilt python packages into the output of a - top-level `python_binary()`or `python_test()`rule. + top-level `python_binary()` or `python_test()` rule. These prebuilt libraries can either be [whl files](https://www.python.org/dev/peps/pep-0427/) or eggs @@ -191,9 +191,21 @@ prebuilt_python_library = prelude_rule( { "compile": attrs.bool(default = False), "contacts": attrs.list(attrs.string(), default = []), + "cxx_header_dirs": attrs.option(attrs.list(attrs.string()), default = None), + "infer_cxx_header_dirs": attrs.bool(default = False), "default_host_platform": attrs.option(attrs.configuration_label(), default = None), "ignore_compile_errors": attrs.bool(default = False), "licenses": attrs.list(attrs.source(), default = []), + "strip_soabi_tags": attrs.bool( + default = False, + doc = """ + Strip the SOABI tags from extensions in the prebuilt library. + + Note that this should be considered unsafe, as it removes builtin + protections that fail fast when a potententially incompatible + native extension is imported. 
+ """, + ), } ), ) @@ -278,6 +290,8 @@ python_binary = prelude_rule( "platform_deps": attrs.list(attrs.tuple(attrs.regex(), attrs.set(attrs.dep(), sorted = True)), default = []), "platform_linker_flags": attrs.list(attrs.tuple(attrs.regex(), attrs.list(attrs.arg(anon_target_compatible = True))), default = []), "platform_preload_deps": attrs.list(attrs.tuple(attrs.regex(), attrs.set(attrs.dep(), sorted = False)), default = []), + "repl_only_deps": attrs.list(attrs.dep(), default = []), + "repl_main": attrs.option(attrs.string(), default = None), "prefer_stripped_native_objects": attrs.bool(default = False), "version_universe": attrs.option(attrs.string(), default = None), "zip_safe": attrs.option(attrs.bool(), default = None), @@ -396,6 +410,7 @@ python_test = prelude_rule( further = None, attrs = ( # @unsorted-dict-items + buck.inject_test_env_arg() | buck.labels_arg() | python_common.srcs_arg() | python_common.platform_srcs_arg() | @@ -456,6 +471,8 @@ python_test = prelude_rule( "platform_deps": attrs.list(attrs.tuple(attrs.regex(), attrs.set(attrs.dep(), sorted = True)), default = []), "platform_linker_flags": attrs.list(attrs.tuple(attrs.regex(), attrs.list(attrs.arg(anon_target_compatible = True))), default = []), "platform_preload_deps": attrs.list(attrs.tuple(attrs.regex(), attrs.set(attrs.dep(), sorted = False)), default = []), + "repl_only_deps": attrs.list(attrs.dep(), default = []), + "repl_main": attrs.option(attrs.string(), default = None), "prefer_stripped_native_objects": attrs.bool(default = False), "runner": attrs.option(attrs.dep(), default = None), "specs": attrs.option(attrs.arg(json = True), default = None), diff --git a/prelude/decls/re_test_common.bzl b/prelude/decls/re_test_common.bzl index c4c4cd9343c..4a9e7240875 100644 --- a/prelude/decls/re_test_common.bzl +++ b/prelude/decls/re_test_common.bzl @@ -15,9 +15,12 @@ def _opts_for_tests_arg() -> Attr: # { # "capabilities": Dict | None # "listing_capabilities": Dict | None + # "local_listing_enabled": bool | None + # "local_enabled": bool | None # "use_case": str | None # "remote_cache_enabled": bool | None # "dependencies": list> | [] + # "resource_units": int | None # } return attrs.dict( key = attrs.string(), @@ -31,6 +34,7 @@ def _opts_for_tests_arg() -> Attr: attrs.string(), attrs.bool(), attrs.list(attrs.dict(key = attrs.string(), value = attrs.string()), default = []), + attrs.int(), ), # TODO(cjhopman): I think this default does nothing, it should be deleted default = None, diff --git a/prelude/decls/rust_common.bzl b/prelude/decls/rust_common.bzl index 0aa594c2aa0..36602720257 100644 --- a/prelude/decls/rust_common.bzl +++ b/prelude/decls/rust_common.bzl @@ -70,6 +70,18 @@ def _linker_flags_arg(): """), } +def _exported_linker_flags_arg(): + return { + "exported_linker_flags": attrs.list(attrs.arg(anon_target_compatible = True), default = [], doc = """ + A set of additional flag to pass before this item on the link line, even if + this items is compiled to a DSO. +"""), + "exported_post_linker_flags": attrs.list(attrs.arg(anon_target_compatible = True), default = [], doc = """ + A set of additional flag to pass after this item on the link line, even if + this items is compiled to a DSO. +"""), + } + def _crate(crate_type): return { "crate": crate_type, @@ -83,6 +95,15 @@ def _crate_root(): """), } +def _default_roots_arg(): + return { + "default_roots": attrs.option(attrs.list(attrs.string()), default = None, doc = """ + Set the candidate source names to consider for crate root. 
+            Typically used to disambiguate between `lib.rs` and `main.rs` for `rust_test`,
+            which may declare a test suite for either library or binary rules. Has no effect
+            if an explicit `crate_root` is provided.
+"""),
+    }
+
 def _env_arg():
     return {
         "env": attrs.dict(key = attrs.string(), value = attrs.arg(), sorted = False, default = {}, doc = """
@@ -91,6 +112,23 @@
     """),
 }

+def _run_env_arg():
+    return {
+        "run_env": attrs.dict(key = attrs.string(), value = attrs.arg(), sorted = False, default = {}, doc = """
+            Set environment variables during test execution. The environment variable values may
+            include macros which are expanded.
+"""),
+    }
+
+def _build_and_run_env_arg():
+    # Same as env_arg(), but with different documentation.
+    return {
+        "env": attrs.dict(key = attrs.string(), value = attrs.arg(), sorted = False, default = {}, doc = """
+            Set environment variables for this rule's invocations of rustc *and* during execution of
+            the tests. The environment variable values may include macros which are expanded.
+"""),
+    }
+
 def _mapped_srcs_arg():
     return {
         "mapped_srcs": attrs.dict(key = attrs.source(), value = attrs.string(), sorted = False, default = {}, doc = """
@@ -106,7 +144,7 @@
 def _named_deps_arg(is_binary: bool):
     return {
-        "named_deps": attrs.dict(key = attrs.string(), value = rust_target_dep(is_binary), sorted = False, default = {}, doc = """
+        "named_deps": attrs.one_of(attrs.dict(key = attrs.string(), value = rust_target_dep(is_binary), sorted = False), attrs.list(attrs.tuple(attrs.arg(), rust_target_dep(is_binary))), default = {}, doc = """
             Add crate dependencies and define a local name by which to use that dependency by.
             This allows a crate to have multiple dependencies with the same crate name. For example:
             `named_deps = {"local_name", ":some_rust_crate" }`.
@@ -140,9 +178,13 @@ rust_common = struct(
     edition_arg = _edition_arg,
     rustc_flags_arg = _rustc_flags_arg,
     linker_flags_arg = _linker_flags_arg,
+    exported_linker_flags_arg = _exported_linker_flags_arg,
     crate = _crate,
     crate_root = _crate_root,
+    default_roots_arg = _default_roots_arg,
     env_arg = _env_arg,
+    run_env_arg = _run_env_arg,
+    build_and_run_env_arg = _build_and_run_env_arg,
     mapped_srcs_arg = _mapped_srcs_arg,
     named_deps_arg = _named_deps_arg,
     rust_toolchain_arg = _rust_toolchain_arg,
diff --git a/prelude/decls/rust_rules.bzl b/prelude/decls/rust_rules.bzl
index 703436c81d8..b60d9748766 100644
--- a/prelude/decls/rust_rules.bzl
+++ b/prelude/decls/rust_rules.bzl
@@ -5,70 +5,20 @@
 # License, Version 2.0 found in the LICENSE-APACHE file in the root directory
 # of this source tree.
-load("@prelude//cxx/user:link_group_map.bzl", "link_group_map_attr") +load("@prelude//cxx:link_groups_types.bzl", "LINK_GROUP_MAP_ATTR") +load("@prelude//linking:types.bzl", "Linkage") +load("@prelude//rust:clippy_configuration.bzl", "ClippyConfiguration") load("@prelude//rust:link_info.bzl", "RustProcMacroPlugin") load("@prelude//rust:rust_binary.bzl", "rust_binary_impl", "rust_test_impl") -load("@prelude//rust:rust_library.bzl", "prebuilt_rust_library_impl", "rust_library_impl") -load(":common.bzl", "Linkage", "buck", "prelude_rule") +load("@prelude//rust:rust_library.bzl", "rust_library_impl") +load(":common.bzl", "buck", "prelude_rule") load(":native_common.bzl", "native_common") load(":re_test_common.bzl", "re_test_common") load(":rust_common.bzl", "rust_common", "rust_target_dep") -prebuilt_rust_library = prelude_rule( - name = "prebuilt_rust_library", - impl = prebuilt_rust_library_impl, - docs = """ - A prebuilt\\_rust\\_library() specifies a pre-built Rust crate, and any dependencies - it may have on other crates (typically also prebuilt). - - - Note: Buck is currently tested with (and therefore supports) version 1.32.0 of Rust. - """, - examples = """ - ``` - - prebuilt_rust_library( - name = 'dailygreet', - rlib = 'libdailygreet.rlib', - deps = [ - ':jinsy', - ], - ) - - prebuilt_rust_library( - name = 'jinsy', - rlib = 'libarbiter-6337e9cb899bd295.rlib', - ) - - ``` - """, - further = None, - attrs = ( - # @unsorted-dict-items - { - "rlib": attrs.source(doc = """ - Path to the precompiled Rust crate - typically of the form 'libfoo.rlib', or - 'libfoo-abc123def456.rlib' if it has symbol versioning metadata. - """), - } | - native_common.preferred_linkage(preferred_linkage_type = attrs.enum(Linkage, default = "any")) | - rust_common.crate(crate_type = attrs.string(default = "")) | - rust_common.deps_arg(is_binary = False) | - { - "contacts": attrs.list(attrs.string(), default = []), - "default_host_platform": attrs.option(attrs.configuration_label(), default = None), - "labels": attrs.list(attrs.string(), default = []), - "licenses": attrs.list(attrs.source(), default = []), - "proc_macro": attrs.bool(default = False), - } | - rust_common.cxx_toolchain_arg() | - rust_common.rust_toolchain_arg() - ), - uses_plugins = [RustProcMacroPlugin], -) - def _rust_common_attributes(is_binary: bool): return { + "clippy_configuration": attrs.option(attrs.dep(providers = [ClippyConfiguration]), default = None), "contacts": attrs.list(attrs.string(), default = []), "coverage": attrs.bool(default = False), "default_host_platform": attrs.option(attrs.configuration_label(), default = None), @@ -89,7 +39,6 @@ def _rust_binary_attrs_group(prefix: str) -> dict[str, Attr]: attrs = (rust_common.deps_arg(is_binary = True) | rust_common.named_deps_arg(is_binary = True) | rust_common.linker_flags_arg() | - rust_common.env_arg() | native_common.link_style()) return {prefix + name: v for name, v in attrs.items()} @@ -102,7 +51,7 @@ _RUST_EXECUTABLE_ATTRIBUTES = { "enable_distributed_thinlto": attrs.bool(default = False), # Required by the rules but not supported, since Rust is auto-link groups only "link_group": attrs.default_only(attrs.option(attrs.string(), default = None)), - "link_group_map": link_group_map_attr(), + "link_group_map": LINK_GROUP_MAP_ATTR, "link_group_min_binary_node_count": attrs.option(attrs.int(), default = None), "rpath": attrs.bool(default = False, doc = """ Set the "rpath" in the executable when using a shared link style. 
@@ -120,9 +69,7 @@ rust_binary = prelude_rule( If you invoke a build with the `check` flavor, then Buck will invoke rustc to check the code (typecheck, produce warnings, etc), but won't generate executable code. When applied to binaries it produces no output; for libraries it produces metadata for - consumers of the library. When building with `check`, extra compiler flags from - the `rust.rustc_check_flags` are added to the compiler's command line options, - to allow for extra warnings, etc. + consumers of the library. Note: Buck is currently tested with (and therefore supports) version 1.32.0 of Rust. @@ -172,6 +119,7 @@ rust_binary = prelude_rule( rust_common.rustc_flags_arg() | rust_common.crate(crate_type = attrs.option(attrs.string(), default = None)) | rust_common.crate_root() | + rust_common.env_arg() | _rust_binary_attrs_group(prefix = "") | _rust_common_attributes(is_binary = True) | _RUST_EXECUTABLE_ATTRIBUTES | @@ -194,9 +142,7 @@ rust_library = prelude_rule( If you invoke a build with the `check` flavor, then Buck will invoke rustc to check the code (typecheck, produce warnings, etc), but won't generate executable code. When applied to binaries it produces no output; for libraries it produces metadata for - consumers of the library. When building with `check`, extra compiler flags from - the `rust.rustc_check_flags` are added to the compiler's command line options, - to allow for extra warnings, etc. + consumers of the library. Note: Buck is currently tested with (and therefore supports) version 1.32.0 of Rust. @@ -236,13 +182,17 @@ rust_library = prelude_rule( # don't have to know whether we're building a rust_binary or a # rust_library. rust_common.linker_flags_arg() | + rust_common.exported_linker_flags_arg() | rust_common.env_arg() | rust_common.crate(crate_type = attrs.option(attrs.string(), default = None)) | rust_common.crate_root() | - native_common.preferred_linkage(preferred_linkage_type = attrs.enum(Linkage, default = "any")) | + native_common.preferred_linkage(preferred_linkage_type = attrs.enum(Linkage.values(), default = "any")) | + native_common.soname() | + native_common.link_style() | _rust_common_attributes(is_binary = False) | { "crate_dynamic": attrs.option(attrs.dep(), default = None), + "doc_env": rust_common.env_arg()["env"], "doctests": attrs.option(attrs.bool(), default = None), "proc_macro": attrs.bool(default = False), "supports_python_dlopen": attrs.option(attrs.bool(), default = None), @@ -303,6 +253,7 @@ rust_test = prelude_rule( further = None, attrs = ( # @unsorted-dict-items + buck.inject_test_env_arg() | rust_common.srcs_arg() | rust_common.mapped_srcs_arg() | rust_common.edition_arg() | @@ -310,6 +261,9 @@ rust_test = prelude_rule( rust_common.rustc_flags_arg() | rust_common.crate(crate_type = attrs.option(attrs.string(), default = None)) | rust_common.crate_root() | + rust_common.default_roots_arg() | + rust_common.run_env_arg() | + rust_common.build_and_run_env_arg() | _rust_binary_attrs_group(prefix = "") | _rust_common_attributes(is_binary = True) | _RUST_EXECUTABLE_ATTRIBUTES | @@ -329,7 +283,6 @@ rust_test = prelude_rule( ) rust_rules = struct( - prebuilt_rust_library = prebuilt_rust_library, rust_binary = rust_binary, rust_library = rust_library, rust_test = rust_test, diff --git a/prelude/decls/scala_rules.bzl b/prelude/decls/scala_rules.bzl index 80c95edda63..e4482dad030 100644 --- a/prelude/decls/scala_rules.bzl +++ b/prelude/decls/scala_rules.bzl @@ -6,6 +6,7 @@ # of this source tree.
load(":common.bzl", "AbiGenerationMode", "ForkMode", "LogLevel", "SourceAbiVerificationMode", "TestType", "UnusedDependenciesAction", "prelude_rule") +load(":jvm_common.bzl", "jvm_common") scala_library = prelude_rule( name = "scala_library", @@ -26,14 +27,12 @@ scala_library = prelude_rule( "exported_provided_deps": attrs.list(attrs.dep(), default = []), "extra_arguments": attrs.list(attrs.string(), default = []), "java_version": attrs.option(attrs.string(), default = None), - "javac": attrs.option(attrs.source(), default = None), "labels": attrs.list(attrs.string(), default = []), "licenses": attrs.list(attrs.source(), default = []), "manifest_file": attrs.option(attrs.source(), default = None), "maven_coords": attrs.option(attrs.string(), default = None), "never_mark_as_unused_dependency": attrs.option(attrs.bool(), default = None), "on_unused_dependencies": attrs.option(attrs.enum(UnusedDependenciesAction), default = None), - "plugins": attrs.list(attrs.dep(), default = []), "proguard_config": attrs.option(attrs.source(), default = None), "provided_deps": attrs.list(attrs.dep(), default = []), "remove_classes": attrs.list(attrs.regex(), default = []), @@ -46,8 +45,7 @@ scala_library = prelude_rule( "source_only_abi_deps": attrs.list(attrs.dep(), default = []), "srcs": attrs.list(attrs.source(), default = []), "target": attrs.option(attrs.string(), default = None), - "_wip_java_plugin_arguments": attrs.dict(attrs.label(), attrs.list(attrs.string()), default = {}), - } + } | jvm_common.plugins() | jvm_common.javac() ), ) @@ -75,14 +73,12 @@ scala_test = prelude_rule( "extra_arguments": attrs.list(attrs.string(), default = []), "fork_mode": attrs.enum(ForkMode, default = "none"), "java_version": attrs.option(attrs.string(), default = None), - "javac": attrs.option(attrs.source(), default = None), "labels": attrs.list(attrs.string(), default = []), "licenses": attrs.list(attrs.source(), default = []), "manifest_file": attrs.option(attrs.source(), default = None), "maven_coords": attrs.option(attrs.string(), default = None), "never_mark_as_unused_dependency": attrs.option(attrs.bool(), default = None), "on_unused_dependencies": attrs.option(attrs.enum(UnusedDependenciesAction), default = None), - "plugins": attrs.list(attrs.dep(), default = []), "proguard_config": attrs.option(attrs.source(), default = None), "provided_deps": attrs.list(attrs.dep(), default = []), "remove_classes": attrs.list(attrs.regex(), default = []), @@ -104,8 +100,7 @@ scala_test = prelude_rule( "use_cxx_libraries": attrs.option(attrs.bool(), default = None), "use_dependency_order_classpath": attrs.option(attrs.bool(), default = None), "vm_args": attrs.list(attrs.arg(), default = []), - "_wip_java_plugin_arguments": attrs.dict(attrs.label(), attrs.list(attrs.string()), default = {}), - } + } | jvm_common.plugins() | jvm_common.javac() ), ) diff --git a/prelude/decls/shell_rules.bzl b/prelude/decls/shell_rules.bzl index 6e8b5a7b84f..01d992975fe 100644 --- a/prelude/decls/shell_rules.bzl +++ b/prelude/decls/shell_rules.bzl @@ -160,6 +160,7 @@ sh_test = prelude_rule( further = None, attrs = ( # @unsorted-dict-items + buck.inject_test_env_arg() | { "test": attrs.option(attrs.one_of(attrs.dep(), attrs.source()), default = None, doc = """ Either the path to the script (relative to the build file), or a `build target`. 
diff --git a/prelude/decls/toolchains_common.bzl b/prelude/decls/toolchains_common.bzl index 3204edb16cf..6cd2ad2c415 100644 --- a/prelude/decls/toolchains_common.bzl +++ b/prelude/decls/toolchains_common.bzl @@ -7,8 +7,8 @@ load("@prelude//android:android_toolchain.bzl", "AndroidPlatformInfo", "AndroidToolchainInfo") load("@prelude//csharp:toolchain.bzl", "CSharpToolchainInfo") -load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxPlatformInfo", "CxxToolchainInfo") load("@prelude//go:toolchain.bzl", "GoToolchainInfo") +load("@prelude//go_bootstrap:go_bootstrap.bzl", "GoBootstrapToolchainInfo") load("@prelude//haskell:toolchain.bzl", "HaskellPlatformInfo", "HaskellToolchainInfo") load("@prelude//java:dex_toolchain.bzl", "DexToolchainInfo") load( @@ -28,14 +28,7 @@ load("@prelude//rust:rust_toolchain.bzl", "RustToolchainInfo") load("@prelude//tests:remote_test_execution_toolchain.bzl", "RemoteTestExecutionToolchainInfo") load("@prelude//zip_file:zip_file_toolchain.bzl", "ZipFileToolchainInfo") -def _toolchain(lang: str, providers: list[typing.Any], default_only = True) -> Attr: - toolchain = attrs.toolchain_dep(default = "toolchains//:" + lang, providers = providers) - if default_only: - return attrs.default_only(toolchain) - else: - return toolchain - -def _toolchain_with_override(lang: str, providers: list[typing.Any]) -> Attr: +def _toolchain(lang: str, providers: list[typing.Any]) -> Attr: return attrs.toolchain_dep(default = "toolchains//:" + lang, providers = providers) def _android_toolchain(): @@ -45,19 +38,23 @@ def _csharp_toolchain(): return _toolchain("csharp", [CSharpToolchainInfo]) def _cxx_toolchain(): - return _toolchain("cxx", [CxxToolchainInfo, CxxPlatformInfo]) + # `CxxToolchainInfo, CxxPlatformInfo`, but python doesn't require it + return _toolchain("cxx", []) def _dex_toolchain(): - return _toolchain_with_override("dex", [DexToolchainInfo]) + return _toolchain("dex", [DexToolchainInfo]) def _go_toolchain(): return _toolchain("go", [GoToolchainInfo]) +def _go_bootstrap_toolchain(): + return _toolchain("go_bootstrap", [GoBootstrapToolchainInfo]) + def _haskell_toolchain(): return _toolchain("haskell", [HaskellToolchainInfo, HaskellPlatformInfo]) def _java_toolchain(): - return _toolchain_with_override("java", [JavaToolchainInfo, JavaPlatformInfo]) + return _toolchain("java", [JavaToolchainInfo, JavaPlatformInfo]) def _java_for_android_toolchain(): return _toolchain("java_for_android", [JavaToolchainInfo, JavaPlatformInfo]) @@ -69,11 +66,11 @@ def _java_test_toolchain(): return _toolchain("java_test", [JavaTestToolchainInfo]) def _kotlin_toolchain(): - return _toolchain_with_override("kotlin", [KotlinToolchainInfo]) + return _toolchain("kotlin", [KotlinToolchainInfo]) def _prebuilt_jar_toolchain(): # Override is allowed for bootstrapping prebuilt jar toolchains - return _toolchain_with_override("prebuilt_jar", [PrebuiltJarToolchainInfo]) + return _toolchain("prebuilt_jar", [PrebuiltJarToolchainInfo]) def _python_toolchain(): return _toolchain("python", [PythonToolchainInfo, PythonPlatformInfo]) @@ -82,7 +79,7 @@ def _python_bootstrap_toolchain(): return _toolchain("python_bootstrap", [PythonBootstrapToolchainInfo]) def _rust_toolchain(): - return _toolchain("rust", [RustToolchainInfo], default_only = False) + return _toolchain("rust", [RustToolchainInfo]) def _zip_file_toolchain(): return _toolchain("zip_file", [ZipFileToolchainInfo]) @@ -96,6 +93,7 @@ toolchains_common = struct( cxx = _cxx_toolchain, dex = _dex_toolchain, go = _go_toolchain, + go_bootstrap = 
_go_bootstrap_toolchain, haskell = _haskell_toolchain, java = _java_toolchain, java_for_android = _java_for_android_toolchain, diff --git a/prelude/erlang/applications/BUCK.v2 b/prelude/erlang/applications/BUCK.v2 index 1148a21894c..b476ddc2592 100644 --- a/prelude/erlang/applications/BUCK.v2 +++ b/prelude/erlang/applications/BUCK.v2 @@ -1,3 +1,8 @@ load("@prelude//erlang:erlang_otp_application.bzl", "gen_otp_applications") +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() gen_otp_applications() diff --git a/prelude/erlang/common_test/.elp.toml b/prelude/erlang/common_test/.elp.toml index e4ffd17dcf3..81c33a6a09a 100644 --- a/prelude/erlang/common_test/.elp.toml +++ b/prelude/erlang/common_test/.elp.toml @@ -1,4 +1,4 @@ [buck] build_deps = false enabled = true -included_targets = ["//erlang/common_test/..."] +included_targets = ["prelude//erlang/common_test/..."] diff --git a/prelude/erlang/common_test/common/BUCK.v2 b/prelude/erlang/common_test/common/BUCK.v2 index 7bd533f7a36..fbd20f2c1a2 100644 --- a/prelude/erlang/common_test/common/BUCK.v2 +++ b/prelude/erlang/common_test/common/BUCK.v2 @@ -1,3 +1,9 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + erlang_application( name = "common", srcs = glob([ diff --git a/prelude/erlang/common_test/common/include/buck_ct_records.hrl b/prelude/erlang/common_test/common/include/buck_ct_records.hrl index 1f1ebdd3ef0..b9dcf9ec1a7 100644 --- a/prelude/erlang/common_test/common/include/buck_ct_records.hrl +++ b/prelude/erlang/common_test/common/include/buck_ct_records.hrl @@ -8,12 +8,12 @@ %% % @format -record(test_info, { - dependencies :: [string()], - test_suite :: string(), - config_files :: [string()], + dependencies :: [file:filename()], + test_suite :: binary(), + config_files :: [binary()], providers :: [{atom(), [term()]}], ct_opts :: [term()], - erl_cmd :: string(), + erl_cmd :: [binary()], extra_flags :: [string()], common_app_env :: #{string() => string()}, artifact_annotation_mfa :: artifact_annotations:annotation_function() @@ -31,7 +31,7 @@ providers :: [{module(), [term()]}], ct_opts :: [term()], common_app_env :: #{string() => string()}, - erl_cmd :: string(), + erl_cmd :: [binary()], extra_flags :: [string()], artifact_annotation_mfa :: artifact_annotations:annotation_function() }). diff --git a/prelude/erlang/common_test/common/src/artifact_annotations.erl b/prelude/erlang/common_test/common/src/artifact_annotations.erl index 1b823c7cdea..2bf533bfc9d 100644 --- a/prelude/erlang/common_test/common/src/artifact_annotations.erl +++ b/prelude/erlang/common_test/common/src/artifact_annotations.erl @@ -37,7 +37,7 @@ -export_type([annotation_function/0]). -spec serialize(test_result_artifact_annotations()) -> binary(). -serialize(ArtifactAnnotation) -> jsone:encode(ArtifactAnnotation). +serialize(ArtifactAnnotation) -> json:encode(ArtifactAnnotation). -spec create_artifact_annotation(file:filename(), #test_env{}) -> test_result_artifact_annotations(). create_artifact_annotation(FileName, TestEnv) -> diff --git a/prelude/erlang/common_test/common/src/buck_ct_parser.erl b/prelude/erlang/common_test/common/src/buck_ct_parser.erl index 879ec1fff3f..b0488dce499 100644 --- a/prelude/erlang/common_test/common/src/buck_ct_parser.erl +++ b/prelude/erlang/common_test/common/src/buck_ct_parser.erl @@ -18,12 +18,12 @@ %% Public API -export([parse_str/1]). --spec parse_str(string()) -> term(). 
-parse_str("") -> +-spec parse_str(binary()) -> term(). +parse_str(<<"">>) -> []; parse_str(StrArgs) -> try - {ok, Tokens, _} = erl_scan:string(StrArgs ++ "."), + {ok, Tokens, _} = erl_scan:string(unicode:characters_to_list([StrArgs, "."])), erl_parse:parse_term(Tokens) of {ok, Term} -> @@ -34,7 +34,7 @@ parse_str(StrArgs) -> E:R:S -> error( lists:flatten( - io_lib:format("Error parsing StrArgs ~p, error ~p", [StrArgs, erl_error:format_exception(E, R, S)]) + io_lib:format("Error parsing StrArgs ~p, error ~ts", [StrArgs, erl_error:format_exception(E, R, S)]) ) ) end. diff --git a/prelude/erlang/common_test/common/src/buck_ct_provider.erl b/prelude/erlang/common_test/common/src/buck_ct_provider.erl index 2d04eb4dd55..3807f112304 100644 --- a/prelude/erlang/common_test/common/src/buck_ct_provider.erl +++ b/prelude/erlang/common_test/common/src/buck_ct_provider.erl @@ -115,7 +115,7 @@ execute_method_on_provider(Method, ProviderName, ProviderState, Args) -> {error, Reason} -> ErrorMsg = unicode:characters_to_list( io_lib:format( - "Method ~p on provider ~p with sate ~p ~n returned with error ~p ~n", [ + "Method ~p on provider ~p with state ~p ~n returned with error ~p ~n", [ Method, ProviderName, ProviderState, Reason ] ) @@ -138,7 +138,7 @@ execute_method_on_provider(Method, ProviderName, ProviderState, Args) -> catch Class:Reason:StackTrace -> ErrorMsg = unicode:characters_to_list( - io_lib:format("Method ~p on provider ~p with sate ~p ~n ~s ~n", [ + io_lib:format("Method ~p on provider ~p with state ~p ~n ~s ~n", [ Method, ProviderName, ProviderState, diff --git a/prelude/erlang/common_test/common/src/ct_error_printer.erl b/prelude/erlang/common_test/common/src/ct_error_printer.erl index 2a4b59961b9..2bfe18251e5 100644 --- a/prelude/erlang/common_test/common/src/ct_error_printer.erl +++ b/prelude/erlang/common_test/common/src/ct_error_printer.erl @@ -98,7 +98,7 @@ maybe_custom_format({{Type, Props = #{formatter := Formatter}}, _StackTrace}) wh "~s~n", [erl_error:format_exception(E, R, ST)] ), - io_lib:format("original assertion: ~n" "~p~n", {Type, Props}) + io_lib:format("original assertion: ~n" "~p~n", [{Type, Props}]) ]} end; maybe_custom_format(_Reason) -> diff --git a/prelude/erlang/common_test/common/src/execution_logs.erl b/prelude/erlang/common_test/common/src/execution_logs.erl deleted file mode 100644 index c809394ad87..00000000000 --- a/prelude/erlang/common_test/common/src/execution_logs.erl +++ /dev/null @@ -1,154 +0,0 @@ -%% Copyright (c) Meta Platforms, Inc. and affiliates. -%% -%% This source code is licensed under both the MIT license found in the -%% LICENSE-MIT file in the root directory of this source tree and the Apache -%% License, Version 2.0 found in the LICENSE-APACHE file in the root directory -%% of this source tree. - -%%%------------------------------------------------------------------- -%%% @doc -%%% Search in the execution directory produced by buck2 test -%%% for relevant logs to display to the user. -%%% Link them into a temporary directory, and produce a json output -%%% that lists them. -%%% @end -%%% % @format - --module(execution_logs). - --compile(warn_missing_spec). - -%% Public API --export([create_dir_summary/1]). - --type key() :: - buck2_exec_dir | log_private | suite_html | scuba_link | test_log_json | ct_log | ct_stdout. - --type key_entry() :: {key(), string()} | not_found. - --spec create_dir_summary(file:filename()) -> #{atom() => binary()}. 
-create_dir_summary(ExecDir) -> - TempDir = create_temp_directory(), - Funcs = [ - fun add_test_log/2, - fun add_test_log_json/2, - fun add_suite_html/2, - fun add_log_private/2, - fun add_exec_dir/2, - fun add_ct_log/2, - fun add_ct_stdout/2 - ], - lists:foldl( - fun(Func, Map) -> - case Func(TempDir, ExecDir) of - not_found -> Map; - {Key, Path} -> Map#{Key => list_to_binary(Path)} - end - end, - #{}, - Funcs - ). - --spec add_ct_log(file:filename(), file:filename()) -> key_entry(). -add_ct_log(TempDir, ExecDir) -> - case find_pattern(ExecDir, "ct_executor.log", file) of - {error, _} -> - not_found; - TestLogJson -> - file:make_symlink(TestLogJson, filename:join(TempDir, "ct.log")), - {ct_log, filename:join(TempDir, "ct.log")} - end. - --spec add_ct_stdout(file:filename(), file:filename()) -> key_entry(). -add_ct_stdout(TempDir, ExecDir) -> - case find_pattern(ExecDir, "ct_executor.stdout.txt", file) of - {error, _} -> - not_found; - TestLogJson -> - file:make_symlink(TestLogJson, filename:join(TempDir, "ct.stdout")), - {ct_stdout, filename:join(TempDir, "ct.stdout")} - end. - --spec add_test_log(file:filename(), file:filename()) -> key_entry(). -add_test_log(TempDir, ExecDir) -> - case find_pattern(ExecDir, "**/test.log", file) of - {error, _} -> - not_found; - TestLogJson -> - file:make_symlink(TestLogJson, filename:join(TempDir, "test.log")), - {test_log, filename:join(TempDir, "test.log")} - end. - --spec add_test_log_json(file:filename(), file:filename()) -> key_entry(). -add_test_log_json(TempDir, ExecDir) -> - case find_pattern(ExecDir, "**/test.log.json", file) of - {error, _} -> - not_found; - TestLogJson -> - file:make_symlink(TestLogJson, filename:join(TempDir, "test.log.json")), - {test_log_json, filename:join(TempDir, "test.log.json")} - end. - --spec add_suite_html(file:filename(), file:filename()) -> key_entry(). -add_suite_html(TempDir, ExecDir) -> - case find_pattern(ExecDir, "**/suite.log.html", file) of - {error, _} -> - not_found; - SuiteHtml -> - file:make_symlink(filename:dirname(SuiteHtml), filename:join(TempDir, "htmls")), - {suite_html, filename:join([TempDir, "htmls", "suite.log.html"])} - end. - --spec add_log_private(file:filename(), file:filename()) -> key_entry(). -add_log_private(TempDir, ExecDir) -> - case find_pattern(ExecDir, "**/log_private", folder) of - {error, _} -> - not_found; - LogPrivate -> - file:make_symlink(LogPrivate, filename:join(TempDir, "log_private")), - {log_private, filename:join(TempDir, "log_private")} - end. - --spec add_exec_dir(file:filename(), file:filename()) -> key_entry(). -add_exec_dir(TempDir, ExecDir) -> - file:make_symlink(ExecDir, filename:join(TempDir, "exec_dir")), - {buck2_exec_dir, filename:join(TempDir, "exec_dir")}. - --spec create_temp_directory() -> file:filename(). -create_temp_directory() -> - RootTmpDir = - case os:getenv("TEMPDIR") of - false -> - NewTmpDir = os:cmd("mktemp"), - filename:dirname(NewTmpDir); - Dir -> - Dir - end, - {_, _, Micro} = TS = os:timestamp(), - {{_Year, Month, Day}, {Hour, Minute, Second}} = calendar:now_to_universal_time(TS), - DateTime = unicode:characters_to_list( - io_lib:format("~2..0w.~2..0wT~2..0w.~2..0w.~2..0w.~w", [ - Month, Day, Hour, Minute, Second, Micro - ]) - ), - is_list(DateTime) orelse error(uncode_format_failed, DateTime), - TmpDir = filename:join([RootTmpDir, "buck2_test_logs", DateTime]), - filelib:ensure_path(TmpDir), - TmpDir. - --spec find_pattern(file:filename(), file:filename(), file | folder) -> - {error, not_found} | file:filename(). 
-find_pattern(ExecDir, Pattern, FolderOrFile) -> - Func = - case FolderOrFile of - folder -> fun filelib:is_dir/1; - file -> fun filelib:is_regular/1 - end, - Candidates = [ - Path - || Path <- filelib:wildcard(filename:join(ExecDir, Pattern)), Func(Path) - ], - case Candidates of - [] -> {error, not_found}; - [LogPrivate | _Tail] -> LogPrivate - end. diff --git a/prelude/erlang/common_test/cth_hooks/BUCK.v2 b/prelude/erlang/common_test/cth_hooks/BUCK.v2 index 631e312d51a..cca3d1d4d44 100644 --- a/prelude/erlang/common_test/cth_hooks/BUCK.v2 +++ b/prelude/erlang/common_test/cth_hooks/BUCK.v2 @@ -1,3 +1,9 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + erlang_application( name = "cth_hooks", srcs = glob([ diff --git a/prelude/erlang/common_test/cth_hooks/src/cth_tpx_role.erl b/prelude/erlang/common_test/cth_hooks/src/cth_tpx_role.erl index fadc41f3849..43915817df9 100644 --- a/prelude/erlang/common_test/cth_hooks/src/cth_tpx_role.erl +++ b/prelude/erlang/common_test/cth_hooks/src/cth_tpx_role.erl @@ -1,3 +1,10 @@ +%%% Copyright (c) Meta Platforms, Inc. and affiliates. +%%% +%%% This source code is licensed under both the MIT license found in the +%%% LICENSE-MIT file in the root directory of this source tree and the Apache +%%% License, Version 2.0 found in the LICENSE-APACHE file in the root directory +%%% of this source tree. + -module(cth_tpx_role). % -------- What are cth_tpx roles?? --------------- diff --git a/prelude/erlang/common_test/cth_hooks/src/cth_tpx_server.erl b/prelude/erlang/common_test/cth_hooks/src/cth_tpx_server.erl index 44390210617..90db84fb1fc 100644 --- a/prelude/erlang/common_test/cth_hooks/src/cth_tpx_server.erl +++ b/prelude/erlang/common_test/cth_hooks/src/cth_tpx_server.erl @@ -1,3 +1,10 @@ +%%% Copyright (c) Meta Platforms, Inc. and affiliates. +%%% +%%% This source code is licensed under both the MIT license found in the +%%% LICENSE-MIT file in the root directory of this source tree and the Apache +%%% License, Version 2.0 found in the LICENSE-APACHE file in the root directory +%%% of this source tree. + -module(cth_tpx_server). -behaviour(gen_server). diff --git a/prelude/erlang/common_test/test_binary/BUCK.v2 b/prelude/erlang/common_test/test_binary/BUCK.v2 index 6e0b3e10aa8..9c18544dd57 100644 --- a/prelude/erlang/common_test/test_binary/BUCK.v2 +++ b/prelude/erlang/common_test/test_binary/BUCK.v2 @@ -1,17 +1,8 @@ -erlang_escript( - name = "escript", - emu_args = [ - "+A0", - "+S1:1", - "+sbtu", - "-mode minimal", - ], - main_module = "test_binary", - visibility = ["PUBLIC"], - deps = [ - ":test_binary", - ], -) +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() erlang_application( name = "test_binary", @@ -24,7 +15,6 @@ erlang_application( "stdlib", "syntax_tools", "xmerl", - "prelude//erlang/toolchain:resources[jsone]", "prelude//erlang/common_test/common:common", "prelude//erlang/common_test/cth_hooks:cth_hooks", "prelude//erlang/common_test/test_exec:test_exec", diff --git a/prelude/erlang/common_test/test_binary/src/json_interfacer.erl b/prelude/erlang/common_test/test_binary/src/json_interfacer.erl index 7e4ffc5d8d7..c10fdd6fbb5 100644 --- a/prelude/erlang/common_test/test_binary/src/json_interfacer.erl +++ b/prelude/erlang/common_test/test_binary/src/json_interfacer.erl @@ -72,7 +72,7 @@ write_json_output(OutputDir, TpxResults) -> -spec format_json([case_result()]) -> string(). 
format_json(TpxResults) -> - jsone:encode(lists:map(fun(CaseResult) -> format_case(CaseResult) end, TpxResults)). + json:encode(lists:map(fun(CaseResult) -> format_case(CaseResult) end, TpxResults)). -spec format_case([case_result()]) -> [formatted_case_result()]. format_case( diff --git a/prelude/erlang/common_test/test_binary/src/list_test.erl b/prelude/erlang/common_test/test_binary/src/list_test.erl index 04abc565997..cba1f20bcdf 100644 --- a/prelude/erlang/common_test/test_binary/src/list_test.erl +++ b/prelude/erlang/common_test/test_binary/src/list_test.erl @@ -93,8 +93,7 @@ test_exported_test(Suite, Test) -> -spec load_hooks([module()]) -> ok. load_hooks(Hooks) -> - lists:map(fun code:ensure_loaded/1, Hooks), - ok. + ok = code:ensure_modules_loaded(Hooks). %% We extract the call to the groups() method so that we can type it. -spec suite_groups(suite(), [module()]) -> groups_output(). diff --git a/prelude/erlang/common_test/test_binary/src/test_binary.erl b/prelude/erlang/common_test/test_binary/src/test_binary.erl index 2403db9f2e7..220b99ec1c0 100644 --- a/prelude/erlang/common_test/test_binary/src/test_binary.erl +++ b/prelude/erlang/common_test/test_binary/src/test_binary.erl @@ -9,13 +9,13 @@ -module(test_binary). --export([main/1]). +-export([main/1, main/0]). -include_lib("common/include/buck_ct_records.hrl"). -include_lib("common/include/tpx_records.hrl"). -include_lib("kernel/include/logger.hrl"). -% in ms, the time we give to init to stop before halting. --define(INIT_STOP_TIMEOUT, 5000). +main() -> + main(init:get_plain_arguments()). main([TestInfoFile, "list", OutputDir]) -> test_logger:set_up_logger(OutputDir, test_listing), @@ -75,79 +75,11 @@ main(Other) -> ), erlang:halt(3). --spec load_test_info(string()) -> #test_info{}. -load_test_info(TestInfoFile) -> - {ok, [ - #{ - "dependencies" := Dependencies, - "test_suite" := SuiteName, - "test_dir" := TestDir, - "config_files" := ConfigFiles, - "providers" := Providers, - "ct_opts" := CtOpts, - "extra_ct_hooks" := ExtraCtHooks, - "erl_cmd" := ErlCmd, - "extra_flags" := ExtraFlags, - "artifact_annotation_mfa" := ArtifactAnnotationMFA, - "common_app_env" := CommonAppEnv - } - ]} = file:consult(TestInfoFile), - Providers1 = buck_ct_parser:parse_str(Providers), - CtOpts1 = make_ct_opts( - buck_ct_parser:parse_str(CtOpts), - [buck_ct_parser:parse_str(CTH) || CTH <- ExtraCtHooks] - ), - #test_info{ - dependencies = [filename:absname(Dep) || Dep <- Dependencies], - test_suite = filename:join(filename:absname(TestDir), [SuiteName, ".beam"]), - config_files = lists:map(fun(ConfigFile) -> filename:absname(ConfigFile) end, ConfigFiles), - providers = Providers1, - artifact_annotation_mfa = parse_mfa(ArtifactAnnotationMFA), - ct_opts = CtOpts1, - erl_cmd = ErlCmd, - extra_flags = ExtraFlags, - common_app_env = CommonAppEnv - }. - --spec parse_mfa(string()) -> {ok, artifact_annotations:annotation_function()} | {error, term()}. -parse_mfa(MFA) -> - case erl_scan:string(MFA) of - {ok, - [ - {'fun', _}, - {atom, _, Module}, - {':', _}, - {atom, _, Function}, - {'/', _}, - {integer, _, 1} - ], - _} -> - fun Module:Function/1; - {ok, - [ - {atom, _, Module}, - {':', _}, - {atom, _, Function}, - {'/', _}, - {integer, _, 1} - ], - _} -> - fun Module:Function/1; - Reason -> - {error, Reason} - end. - --type ctopt() :: term(). --type cth() :: module() | {module(), term()}. - --spec make_ct_opts([ctopt()], [cth()]) -> [ctopt()]. -make_ct_opts(CtOpts, []) -> CtOpts; -make_ct_opts(CtOpts, ExtraCtHooks) -> [{ct_hooks, ExtraCtHooks} | CtOpts]. 
- --spec load_suite(string()) -> [{atom(), string()}]. +-spec load_suite(binary()) -> atom(). load_suite(SuitePath) -> - {module, Module} = code:load_abs(filename:rootname(filename:absname(SuitePath))), - {Module, filename:absname(SuitePath)}. + Path = unicode:characters_to_list(filename:rootname(filename:absname(SuitePath))), + {module, Module} = code:load_abs(Path), + Module. -spec get_hooks(#test_info{}) -> [module()]. get_hooks(TestInfo) -> @@ -163,20 +95,20 @@ get_hooks(TestInfo) -> -spec listing(string(), string()) -> ok. listing(TestInfoFile, OutputDir) -> - TestInfo = load_test_info(TestInfoFile), + TestInfo = test_info:load_from_file(TestInfoFile), Listing = get_listing(TestInfo, OutputDir), listing_interfacer:produce_xml_file(OutputDir, Listing). -spec running(string(), string(), [string()]) -> ok. running(TestInfoFile, OutputDir, Tests) -> AbsOutputDir = filename:absname(OutputDir), - TestInfo = load_test_info(TestInfoFile), + TestInfo = test_info:load_from_file(TestInfoFile), Listing = get_listing(TestInfo, AbsOutputDir), test_runner:run_tests(Tests, TestInfo, AbsOutputDir, Listing). get_listing(TestInfo, OutputDir) -> code:add_paths(TestInfo#test_info.dependencies), - {Suite, _Path} = load_suite(TestInfo#test_info.test_suite), + Suite = load_suite(TestInfo#test_info.test_suite), {ok, ProjectRoot} = file:get_cwd(), true = os:putenv("PROJECT_ROOT", ProjectRoot), @@ -197,7 +129,7 @@ get_listing(TestInfo, OutputDir) -> list_and_run(TestInfoFile, OutputDir) -> os:putenv("ERLANG_BUCK_DEBUG_PRINT", "disabled"), - TestInfo = load_test_info(TestInfoFile), + TestInfo = test_info:load_from_file(TestInfoFile), Listing = get_listing(TestInfo, OutputDir), Tests = listing_to_testnames(Listing), running(TestInfoFile, OutputDir, Tests), @@ -214,7 +146,7 @@ listing_to_testnames(Listing) -> -spec print_results(file:filename()) -> boolean(). print_results(ResultsFile) -> {ok, Data} = file:read_file(ResultsFile), - Results = jsone:decode(Data), + Results = json:decode(Data), {Summary, AnyFailure} = lists:foldl(fun print_individual_results/2, {#{}, false}, Results), io:format("~n~10s: ~b~n~n", ["TOTAL", lists:sum(maps:values(Summary))]), [ diff --git a/prelude/erlang/common_test/test_binary/src/test_info.erl b/prelude/erlang/common_test/test_binary/src/test_info.erl new file mode 100644 index 00000000000..219d79842f5 --- /dev/null +++ b/prelude/erlang/common_test/test_binary/src/test_info.erl @@ -0,0 +1,133 @@ +-module(test_info). + +-export([load_from_file/1, write_to_file/2]). +-include_lib("common/include/buck_ct_records.hrl"). + +-type test_info() :: #test_info{}. +-export_type([test_info/0]). + +-spec load_from_file(file:filename_all()) -> test_info(). 
+load_from_file(TestInfoFile) -> + {ok, Content} = file:read_file(TestInfoFile), + #{ + <<"dependencies">> := Dependencies, + <<"test_suite">> := SuiteName, + <<"test_dir">> := TestDir, + <<"config_files">> := ConfigFiles, + <<"providers">> := Providers, + <<"ct_opts">> := CtOpts, + <<"extra_ct_hooks">> := ExtraCtHooks, + <<"erl_cmd">> := [ErlExec | ErlFlags], + <<"extra_flags">> := ExtraFlags, + <<"artifact_annotation_mfa">> := ArtifactAnnotationMFA, + <<"common_app_env">> := CommonAppEnv + } = json:decode(Content), + Providers1 = buck_ct_parser:parse_str(Providers), + CtOpts1 = make_ct_opts( + buck_ct_parser:parse_str(CtOpts), + [buck_ct_parser:parse_str(CTH) || CTH <- ExtraCtHooks] + ), + + #test_info{ + dependencies = [unicode:characters_to_list(make_path_absolute(Dep)) || Dep <- Dependencies], + test_suite = filename:join((TestDir), [SuiteName, ".beam"]), + config_files = [make_path_absolute(ConfigFile) || ConfigFile <- ConfigFiles], + providers = Providers1, + artifact_annotation_mfa = parse_mfa(ArtifactAnnotationMFA), + ct_opts = CtOpts1, + erl_cmd = [make_path_absolute(ErlExec) | ErlFlags], + extra_flags = ExtraFlags, + common_app_env = CommonAppEnv + }. + +-spec write_to_file(file:filename_all(), test_info()) -> ok | {error, Reason :: term()}. +write_to_file(FileName, TestInfo ) -> + #test_info{ + dependencies = Dependencies, + test_suite = SuiteBeamPath, + config_files = ConfigFiles, + providers = Providers, + artifact_annotation_mfa = ArtifactAnnotationMFA, + ct_opts = CtOpts, + erl_cmd = [ErlCmd | ErlFlags], + extra_flags = ExtraFlags, + common_app_env = CommonAppEnv + } = TestInfo, + ErlTermToStr = fun(Term) -> list_to_binary(lists:flatten(io_lib:format("~p", [Term]))) end, + Json = #{ + <<"dependencies">> => [try_make_path_relative(Dep) || Dep <- Dependencies], + <<"test_suite">> => filename:basename(SuiteBeamPath, ".beam"), + <<"test_dir">> => filename:dirname(SuiteBeamPath), + <<"config_files">> => [try_make_path_relative(ConfigFile) || ConfigFile <- ConfigFiles], + <<"providers">> => ErlTermToStr(Providers), + <<"ct_opts">> => ErlTermToStr(CtOpts), + <<"extra_ct_hooks">> => [], + <<"erl_cmd">> => [try_make_path_relative(ErlCmd) | ErlFlags], + <<"extra_flags">> => ExtraFlags, + <<"artifact_annotation_mfa">> => ErlTermToStr(ArtifactAnnotationMFA), + <<"common_app_env">> => CommonAppEnv + }, + file:write_file(FileName, json:encode(Json)). + + +-spec make_path_absolute(file:filename_all()) -> file:filename_all(). +make_path_absolute(Path) -> + case os:getenv("REPO_ROOT") of + false -> filename:absname(Path); + RepoRoot -> filename:join(RepoRoot, Path) + end. + +-spec try_make_path_relative(file:filename_all()) -> file:filename_all(). +try_make_path_relative(Path) -> + case filename:pathtype(Path) of + relative -> Path; + _ -> + BaseDir = case os:getenv("REPO_ROOT") of + false -> + {ok, CWD} = file:get_cwd(), + CWD; + RepoRoot -> RepoRoot + end, + BaseDirParts = filename:split(BaseDir), + PathParts = filename:split(Path), + case lists:split(length(BaseDirParts), PathParts) of + {BaseDirParts, RelativeParts} -> filename:join(RelativeParts); + _ -> Path + end + end. + + +-spec parse_mfa(binary()) -> artifact_annotations:annotation_function() | {error, term()}. 
+parse_mfa(MFA) -> + case erl_scan:string(unicode:characters_to_list(MFA)) of + {ok, + [ + {'fun', _}, + {atom, _, Module}, + {':', _}, + {atom, _, Function}, + {'/', _}, + {integer, _, 1} + ], + _} -> + fun Module:Function/1; + {ok, + [ + {atom, _, Module}, + {':', _}, + {atom, _, Function}, + {'/', _}, + {integer, _, 1} + ], + _} -> + fun Module:Function/1; + Reason -> + {error, Reason} + end. + +-type ctopt() :: term(). +-type cth() :: module() | {module(), term()}. + +-spec make_ct_opts([ctopt()], [cth()]) -> [ctopt()]. +make_ct_opts(CtOpts, []) -> CtOpts; +make_ct_opts(CtOpts, ExtraCtHooks) -> [{ct_hooks, ExtraCtHooks} | CtOpts]. diff --git a/prelude/erlang/common_test/test_binary/src/test_runner.erl b/prelude/erlang/common_test/test_binary/src/test_runner.erl index a022ec418c9..22ed5662f94 100644 --- a/prelude/erlang/common_test/test_binary/src/test_runner.erl +++ b/prelude/erlang/common_test/test_binary/src/test_runner.erl @@ -22,7 +22,7 @@ -spec run_tests([string()], #test_info{}, string(), [#test_spec_test_case{}]) -> ok. run_tests(Tests, #test_info{} = TestInfo, OutputDir, Listing) -> check_ct_opts(TestInfo#test_info.ct_opts), - Suite = list_to_atom(filename:basename(TestInfo#test_info.test_suite, ".beam")), + Suite = binary_to_atom(filename:basename(TestInfo#test_info.test_suite, ".beam")), StructuredTests = lists:map(fun(Test) -> parse_test_name(Test, Suite) end, Tests), case StructuredTests of [] -> @@ -62,10 +62,8 @@ execute_test_suite( Suite, Tests, filename:absname(filename:dirname(SuitePath)), OutputDir, CtOpts ), TestSpecFile = filename:join(OutputDir, "test_spec.spec"), - lists:foreach( - fun(Spec) -> file:write_file(TestSpecFile, io_lib:format("~tp.~n", [Spec]), [append]) end, - TestSpec - ), + FormattedSpec = [io_lib:format("~tp.~n", [Entry]) || Entry <- TestSpec], + file:write_file(TestSpecFile, FormattedSpec), NewTestEnv = TestEnv#test_env{test_spec_file = TestSpecFile, ct_opts = CtOpts}, try run_test(NewTestEnv) of ok -> ok @@ -169,10 +167,10 @@ provide_output_file( case Status of failed -> collect_results_broken_run( - Tests, Suite, "test binary internal crash", ResultExec, OutLog + Tests, Suite, "internal crash", ResultExec, OutLog ); Other when Other =:= passed orelse Other =:= timeout -> - % Here we either pased or timeout. + % Here we either passed or timeout. case file:read_file(ResultsFile) of {ok, JsonFile} -> TreeResults = binary_to_term(JsonFile), @@ -228,8 +226,6 @@ provide_output_file( json -> json_interfacer:write_json_output(OutputDir, Results) end, - JsonLogs = execution_logs:create_dir_summary(OutputDir), - file:write_file(filename:join(OutputDir, "logs.json"), jsone:encode(JsonLogs)), test_artifact_directory:link_to_artifact_dir(test_logger:get_std_out(OutputDir, ct_executor), OutputDir, TestEnv), test_artifact_directory:link_to_artifact_dir(test_logger:get_std_out(OutputDir, test_runner), OutputDir, TestEnv), test_artifact_directory:prepare(OutputDir, TestEnv). @@ -335,7 +331,8 @@ add_or_append(List, {Key, Value}) -> %% @doc Built the test_spec selecting the requested tests and %% specifying the result output. -spec build_test_spec(atom(), [atom()], string(), string(), [term()]) -> [term()]. 
-build_test_spec(Suite, Tests, TestDir, OutputDir, CtOpts) -> +build_test_spec(Suite, Tests, TestDir0, OutputDir, CtOpts) -> + TestDir = unicode:characters_to_list(TestDir0), ListGroupTest = get_requested_tests(Tests), SpecTests = lists:map( fun @@ -422,7 +419,7 @@ reorder_tests(Tests, #test_spec_test_case{testcases = TestCases}) -> %% Make sure it exists and returns it. set_up_log_dir(OutputDir) -> LogDir = filename:join(OutputDir, "log_dir"), - filelib:ensure_path(LogDir), + ok = filelib:ensure_path(LogDir), LogDir. %% @doc Informs the test runner of a successful test run. diff --git a/prelude/erlang/common_test/test_cli_lib/BUCK.v2 b/prelude/erlang/common_test/test_cli_lib/BUCK.v2 index 9cb3223fc9d..be0241fb233 100644 --- a/prelude/erlang/common_test/test_cli_lib/BUCK.v2 +++ b/prelude/erlang/common_test/test_cli_lib/BUCK.v2 @@ -1,16 +1,18 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + erlang_application( name = "test_cli_lib", srcs = glob(["src/*.erl"]), - applications = [ - "//erlang/common_test/test_binary:test_binary", - ] + read_root_config("erlang", "test_shell_apps", "").split(), + applications = ["//erlang/common_test/test_binary:test_binary"], erl_opts = [ "+debug_info", "+warnings_as_errors", ], - resources = [ - # "//erl/config:sys.ct.config", - ], + resources = [], visibility = ["PUBLIC"], ) diff --git a/prelude/erlang/common_test/test_cli_lib/src/test.erl b/prelude/erlang/common_test/test_cli_lib/src/test.erl index 28f069f9409..ae930da9277 100644 --- a/prelude/erlang/common_test/test_cli_lib/src/test.erl +++ b/prelude/erlang/common_test/test_cli_lib/src/test.erl @@ -15,7 +15,7 @@ -module(test). --include_lib("common/include/tpx_records.hrl"). +-include_lib("common/include/buck_ct_records.hrl"). %% Public API -export([ @@ -24,7 +24,8 @@ list/0, list/1, rerun/1, run/0, run/1, - reset/0 + reset/0, + logs/0 ]). %% init @@ -44,6 +45,8 @@ -type run_spec() :: test_id() | [test_info()]. -type run_result() :: {non_neg_integer(), non_neg_integer()}. +-type provided_test_info() :: test_info:test_info(). + -spec start() -> ok. start() -> info(), @@ -133,6 +136,8 @@ command_description(run, 1) -> }; command_description(reset, 0) -> #{args => [], desc => ["restarts the test node, enabling a clean test state"]}; +command_description(logs, 0) -> + #{args => [], desc => ["print log files of the currently running test suites"]}; command_description(F, A) -> error({help_is_missing, {F, A}}). @@ -191,7 +196,7 @@ run(RegExOrId) -> ChangedCount -> io:format("reloaded ~p modules ~P~n", [ChangedCount, Loaded, 10]), % There were some changes, so list the tests again, then run but without recompiling changes - % Note that if called with the RegEx insted of ToRun test list like above, do_plain_test_run/1 will list the tests again + % Note that if called with the RegEx instead of ToRun test list like above, do_plain_test_run/1 will list the tests again do_plain_test_run(RegExOrId) end; Error -> @@ -213,6 +218,18 @@ reset() -> }) end. +%% @doc Print all the logs of the currently running test suites +-spec logs() -> ok. +logs() -> + ensure_initialized(), + case logs_impl() of + {ok, Logs} -> + lists:foreach(fun(LogPath) -> io:format("~s~n", [LogPath]) end, Logs), + io:format("~n"); + {error, not_found} -> + io:format("no logs found~n") + end. + %% internal -spec list_impl(RegEx :: string()) -> {ok, string()} | {error, term()}. list_impl(RegEx) -> @@ -241,22 +258,29 @@ ensure_initialized() -> -spec init_utility_apps() -> boolean(). 
init_utility_apps() -> + _ = application:load(test_cli_lib), + UtilityApps = application:get_env(test_cli_lib, utility_applications, []), RunningApps = proplists:get_value(running, application:info()), - case proplists:is_defined(test_cli_lib, RunningApps) of + StartResults = [init_utility_app(RunningApps, UtilityApp) || UtilityApp <- UtilityApps], + lists:any(fun(B) when is_boolean(B) -> B end, StartResults). + +-spec init_utility_app(RunningApps :: [atom()], UtilityApp :: atom()) -> boolean(). +init_utility_app(RunningApps, UtilityApp) -> + case proplists:is_defined(UtilityApp, RunningApps) of true -> false; false -> - io:format("starting utility applications...~n", []), - case application:ensure_all_started(test_cli_lib) of + io:format("starting utility application ~s...~n", [UtilityApp]), + case application:ensure_all_started(UtilityApp) of {ok, _} -> true; Error -> - io:format("ERROR: could not start utility applications:~n~p~n", [Error]), - io:format("exiting...~n"), - erlang:halt(-1) + abort("could not start utility applications:~n~p", [Error]) end end. +-define(TYPE_IS_OK(Type), (Type =:= shortnames orelse Type =:= longnames)). + -spec init_node() -> boolean(). init_node() -> case ct_daemon:alive() of @@ -264,16 +288,20 @@ init_node() -> false; false -> io:format("starting test node...~n", []), + #test_info{erl_cmd = ErlCmd} = get_provided_test_info(), case application:get_env(test_cli_lib, node_config) of undefined -> - ct_daemon:start(); - {ok, {Type, NodeName, Cookie}} -> - ct_daemon:start(#{ - name => NodeName, - type => Type, - cookie => Cookie, - options => [{multiply_timetraps, infinity} || is_debug_session()] - }) + ct_daemon:start(ErlCmd); + {ok, {Type, NodeName, Cookie}} when ?TYPE_IS_OK(Type), is_atom(NodeName), is_atom(Cookie) -> + ct_daemon:start( + ErlCmd, + #{ + name => NodeName, + type => Type, + cookie => Cookie, + options => [{multiply_timetraps, infinity} || is_debug_session()] + } + ) end, case is_debug_session() of true -> @@ -284,6 +312,25 @@ init_node() -> true end. +-spec get_provided_test_info() -> provided_test_info(). +get_provided_test_info() -> + case application:get_env(test_cli_lib, test_info_file, undefined) of + undefined -> + abort("test_info_file not provided."); + TestInfoFile when is_binary(TestInfoFile) -> + test_info:load_from_file(TestInfoFile) + end. + +-spec abort(Message :: string()) -> no_return(). +abort(Message) -> + abort(Message, []). + +-spec abort(Format :: string(), Args :: [term()]) -> no_return(). +abort(Format, Args) -> + io:format(standard_error, "ERROR: " ++ Format ++ "~n", Args), + io:format(standard_error, "exiting...~n", []), + erlang:halt(1). + -spec watchdog() -> no_return(). watchdog() -> Node = ct_daemon_node:get_node(), @@ -417,3 +464,17 @@ start_shell() -> user_drv:start(), ok end. + +-spec logs_impl() -> {ok, [file:filename_all()]} | {error, not_found}. +logs_impl() -> + case ct_daemon:priv_dir() of + undefined -> + {error, not_found}; + PrivDir -> + PatternLog = filename:join(PrivDir, "*.log"), + LogPaths = filelib:wildcard(PatternLog), + PatternLogJson = filename:join(PrivDir, "*.log.json"), + LogJsonPaths = filelib:wildcard(PatternLogJson), + AllLogs = lists:sort(LogPaths ++ LogJsonPaths), + {ok, AllLogs} + end. 
diff --git a/prelude/erlang/common_test/test_cli_lib/test/test_cli_e2e_SUITE.erl b/prelude/erlang/common_test/test_cli_lib/test/test_cli_e2e_SUITE.erl index 9cb1778f63e..cc4079dc038 100644 --- a/prelude/erlang/common_test/test_cli_lib/test/test_cli_e2e_SUITE.erl +++ b/prelude/erlang/common_test/test_cli_lib/test/test_cli_e2e_SUITE.erl @@ -7,8 +7,10 @@ -module(test_cli_e2e_SUITE). -include_lib("stdlib/include/assert.hrl"). +-include_lib("common_test/include/ct.hrl"). +-include_lib("common/include/buck_ct_records.hrl"). --export([all/0]). +-export([all/0, init_per_suite/1, end_per_suite/1]). -export([ test_list/1 @@ -17,6 +19,30 @@ all() -> [test_list]. +init_per_suite(Config) -> + PrivDir = ?config(priv_dir, Config), + TestInfoFile = filename:join(PrivDir, <<"test_info">>), + + {ok, [ErlCmd]} = init:get_argument(progname), + + test_info:write_to_file(TestInfoFile, #test_info{ + dependencies = [], + test_suite = list_to_binary(code:which(test_list_SUITE)), + config_files = [], + providers = [], + ct_opts = [], + erl_cmd = ErlCmd, + extra_flags = [], + artifact_annotation_mfa = {foo, bar, 42} + }), + + application:set_env(test_cli_lib, test_info_file, TestInfoFile), + + Config. + +end_per_suite(_Config) -> + ok. + test_list(_Config) -> Expected = "test_cli_e2e_SUITE:\n" diff --git a/prelude/erlang/common_test/test_exec/BUCK.v2 b/prelude/erlang/common_test/test_exec/BUCK.v2 index 1db95d1a632..1cb78df3048 100644 --- a/prelude/erlang/common_test/test_exec/BUCK.v2 +++ b/prelude/erlang/common_test/test_exec/BUCK.v2 @@ -1,3 +1,9 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + erlang_application( name = "test_exec", srcs = glob([ diff --git a/prelude/erlang/common_test/test_exec/src/ct_daemon.erl b/prelude/erlang/common_test/test_exec/src/ct_daemon.erl index c2a1aa6d18e..21b3f488cc9 100644 --- a/prelude/erlang/common_test/test_exec/src/ct_daemon.erl +++ b/prelude/erlang/common_test/test_exec/src/ct_daemon.erl @@ -14,7 +14,7 @@ -module(ct_daemon). -export([ - start/0, start/1, + start/1, start/2, stop/0, alive/0, run/1, @@ -26,18 +26,22 @@ discover/1, load_changed/0, setup_state/0, - output_dir/0 + output_dir/0, + priv_dir/0 ]). %% @doc start a test-node with random name and shortname --spec start() -> ok. -start() -> - ct_daemon_node:start(). +-spec start(ErlCommand) -> ok when + ErlCommand :: [binary()]. +start(ErlCommand) -> + ct_daemon_node:start(ErlCommand). %% @doc starts the test node with the given distribution mode and node name --spec start(ct_daemon_node:config()) -> ok. -start(NodeInfo) -> - ct_daemon_node:start(NodeInfo). +-spec start(ErlCommand, Config) -> ok when + ErlCommand :: [binary()], + Config :: ct_daemon_node:config(). +start(ErlCommand, NodeInfo) -> + ct_daemon_node:start(ErlCommand, NodeInfo). %% @doc stops the test node -spec stop() -> ok. @@ -107,6 +111,10 @@ setup_state() -> output_dir() -> do_call(output_dir). +-spec priv_dir() -> file:filename_all() | undefined. +priv_dir() -> + do_call(priv_dir). + -spec push_paths(Paths :: [file:filename_all()]) -> ok. 
push_paths(Paths) -> case alive() of diff --git a/prelude/erlang/common_test/test_exec/src/ct_daemon_hooks.erl b/prelude/erlang/common_test/test_exec/src/ct_daemon_hooks.erl index d1026c55925..913edb8eef2 100644 --- a/prelude/erlang/common_test/test_exec/src/ct_daemon_hooks.erl +++ b/prelude/erlang/common_test/test_exec/src/ct_daemon_hooks.erl @@ -167,7 +167,7 @@ initialize_hooks() -> end || {Mod, Opts, Prio} <- NormalizedConfiguredHooks ], - %% according to documentation, if two hooks have the same ID, the latter one get's dropped + %% according to documentation, if two hooks have the same ID, the latter one gets dropped PreInitHooks0 = lists:ukeysort(2, HooksWithId), %% now sort with configured prio the inits (default prio being 0) PreInitHooks1 = lists:keysort(1, PreInitHooks0), diff --git a/prelude/erlang/common_test/test_exec/src/ct_daemon_logger.erl b/prelude/erlang/common_test/test_exec/src/ct_daemon_logger.erl index ab147833b5e..74badb9db55 100644 --- a/prelude/erlang/common_test/test_exec/src/ct_daemon_logger.erl +++ b/prelude/erlang/common_test/test_exec/src/ct_daemon_logger.erl @@ -7,7 +7,12 @@ %%%------------------------------------------------------------------- %%% @doc -%%% Setup functions for logger and CT printing facilities +%%% CT handles logging and printing by sending a message to the ct_logs +%%% process. For the test shell, we intercept those messages by starting a +%%% gen_server that receives them and prints them directly to the shell. +%%% We do this instead of using the ct_logs process to have more control +%%% over the output and to avoid starting CT processes that might +%%% interfere with the test shell's functionality. %%% @end %%% % @format @@ -15,67 +20,60 @@ -include_lib("kernel/include/logger.hrl"). +-behaviour(gen_server). + %% Public API --export([setup/2]). +-export([start/1]). + +%% gen_server callbacks +-export([init/1, handle_info/2, handle_call/3, handle_cast/2]). + +-type state() :: #{}. + +-spec init(Args) -> Result when + Args :: term(), + Result :: {ok, state()}. +init(_) -> {ok, #{}}. + +-spec handle_info(Info, State) -> {noreply, State} when + Info :: term(), + State :: state(). +handle_info({log, _SyncOrAsync, _FromPid, _GL, _Category, _Importance, Content, _EscChars} = _Info, State) when + is_list(Content) +-> + % Mimics behaviour from the logger_loop function in ct_logs.erl + IoList = lists:foldl( + fun + ({Format, Args}, IoList) when is_list(Format), is_list(Args) -> + [io_lib:format(Format, Args), "\n", IoList]; + (_, IoList) -> + IoList + end, + [], + Content + ), + io:format("~ts~n", [IoList]), + {noreply, State}; +handle_info(_Info, State) -> + % ignore + {noreply, State}. + +-spec handle_call(Request, From, State) -> {noreply, State} when + Request :: term(), + From :: gen_server:from(), + State :: state(). +handle_call(_Info, _From, State) -> {noreply, State}. + +-spec handle_cast(Request, State) -> {noreply, State} when + Request :: term(), + State :: state(). +handle_cast(_Info, State) -> {noreply, State}. %% @doc starts the gen_server that stands in for ct_logs --spec setup(file:filename_all(), boolean()) -> ok. -setup(OutputDir, InstrumentCTLogs) -> +-spec start(file:filename_all()) -> ok.
+start(OutputDir) -> LogFile = test_logger:get_log_file(OutputDir, ct_daemon), ok = test_logger:configure_logger(LogFile), - %% check is we need to instrument ct_logs - %% this somehow crashes the node startup if CT runs on the - %% controlling node - case InstrumentCTLogs of - true -> - meck:new(ct_logs, [passthrough, no_link]), - meck:expect(ct_logs, tc_log, fun tc_log/3), - meck:expect(ct_logs, tc_log, fun tc_log/4), - meck:expect(ct_logs, tc_log, fun tc_log/5), - meck:expect(ct_logs, tc_print, fun tc_print/3), - meck:expect(ct_logs, tc_print, fun tc_print/4), - meck:expect(ct_logs, tc_print, fun tc_print/5), - meck:expect(ct_logs, tc_pal, fun tc_pal/3), - meck:expect(ct_logs, tc_pal, fun tc_pal/4), - meck:expect(ct_logs, tc_pal, fun tc_pal/5); - _ -> - ok - end, + {ok, _} = gen_server:start_link({local, ct_logs}, ?MODULE, #{}, []), ok. - -tc_log(Category, Format, Args) -> - tc_print(Category, 1000, Format, Args). - -tc_log(Category, Importance, Format, Args) -> - tc_print(Category, Importance, Format, Args, []). - -tc_log(Category, Importance, Format, Args, _Opts) -> - LogMessage = lists:flatten( - io_lib:format("[ct_logs][~p][~p] ~s", [Category, Importance, Format]) - ), - ?LOG_INFO(LogMessage, Args). - -tc_print(Category, Format, Args) -> - tc_print(Category, 1000, Format, Args). - -tc_print(Category, Importance, Format, Args) -> - tc_print(Category, Importance, Format, Args, []). - -tc_print(_Category, _Importance, Format, Args, _Opts) -> - FormatWithTimesStamp = io_lib:format("[~s] ~s\n", [timestamp(), Format]), - FinalFormat = lists:flatten(FormatWithTimesStamp), - io:format(FinalFormat, Args). - -tc_pal(Category, Format, Args) -> - tc_print(Category, 1000, Format, Args). - -tc_pal(Category, Importance, Format, Args) -> - tc_print(Category, Importance, Format, Args, []). - -tc_pal(Category, Importance, Format, Args, Opts) -> - ct_logs:tc_log(Category, Importance, Format, Args, [no_css | Opts]), - tc_print(Category, Importance, Format, Args, Opts). - -timestamp() -> - calendar:system_time_to_rfc3339(erlang:system_time(second)). diff --git a/prelude/erlang/common_test/test_exec/src/ct_daemon_node.erl b/prelude/erlang/common_test/test_exec/src/ct_daemon_node.erl index 0396f818c04..7a8a627c70a 100644 --- a/prelude/erlang/common_test/test_exec/src/ct_daemon_node.erl +++ b/prelude/erlang/common_test/test_exec/src/ct_daemon_node.erl @@ -19,7 +19,7 @@ -include_lib("kernel/include/logger.hrl"). %% Public API --export([start/0, start/1, stop/0, alive/0, get_node/0]). +-export([start/1, start/2, stop/0, alive/0, get_node/0]). -export([node_main/1, get_domain_type/0]). @@ -39,8 +39,9 @@ -export_type([config/0]). %% @doc start node for running tests in isolated way and keep state --spec start() -> ok. -start() -> +-spec start(ErlCommand) -> ok when + ErlCommand :: nonempty_list(binary()). +start(ErlCommand) -> NodeName = list_to_atom( lists:flatten(io_lib:format("test~s-atn@localhost", [random_name()])) ), @@ -50,11 +51,14 @@ start() -> cookie => ct_runner:cookie(), options => [] }, - start(StartConfig). + start(ErlCommand, StartConfig). %% @doc start node for running tests in isolated way and keep state --spec start(config()) -> ok | {error, {crash_on_startup, integer()}}. +-spec start(ErlCommand, Config) -> ok | {error, {crash_on_startup, integer()}} when + ErlCommand :: nonempty_list(binary()), + Config :: config(). 
start( + ErlCommand, _Config = #{ type := Type, name := Node, @@ -71,10 +75,8 @@ start( FullOptions = [{output_dir, OutputDir} | Options], Args = build_daemon_args(Type, Node, Cookie, FullOptions, OutputDir), % Replay = maps:get(replay, Config, false), - % We should forward emu flags here, - % see T129435667 Port = ct_runner:start_test_node( - os:find_executable("erl"), + ErlCommand, [], CodePaths, ConfigFiles, @@ -130,7 +132,7 @@ alive() -> %% @doc node main entry point -spec node_main([node()]) -> no_return(). -node_main([Parent, OutputDirAtom, InstrumentCTLogs]) -> +node_main([Parent, OutputDirAtom]) -> ok = application:load(test_exec), OutputDir = erlang:atom_to_list(OutputDirAtom), @@ -138,7 +140,7 @@ node_main([Parent, OutputDirAtom, InstrumentCTLogs]) -> erlang:system_flag(backtrace_depth, 20), %% setup logger and prepare IO - ok = ct_daemon_logger:setup(OutputDir, InstrumentCTLogs), + ok = ct_daemon_logger:start(OutputDir), true = net_kernel:connect_node(Parent), @@ -173,7 +175,7 @@ ensure_distribution(Type, RandomName, Cookie) -> ([] = os:cmd("epmd -daemon")), Name = list_to_atom( lists:flatten( - io_lib:format("ct_daemon~s", [RandomName]) + io_lib:format("ct_daemon~s@localhost", [RandomName]) ) ), {ok, _Pid} = net_kernel:start(Name, #{name_domain => Type}), @@ -194,7 +196,6 @@ build_daemon_args(Type, Node, Cookie, Options, OutputDir) -> longnames -> "-name"; shortnames -> "-sname" end, - InstrumentCTLogs = erlang:whereis(ct_logs) =:= undefined, [ DistArg, convert_atom_arg(Node), @@ -207,8 +208,7 @@ build_daemon_args(Type, Node, Cookie, Options, OutputDir) -> convert_atom_arg(?MODULE), "node_main", convert_atom_arg(erlang:node()), - OutputDir, - convert_atom_arg(InstrumentCTLogs) + OutputDir ]. -spec convert_atom_arg(atom()) -> string(). @@ -217,13 +217,8 @@ convert_atom_arg(Arg) -> -spec get_config_files() -> [file:filename_all()]. get_config_files() -> - _ = application:load(test_exec), - PrivDir = code:priv_dir(test_exec), - [ - ConfigFile - || ConfigFile <- filelib:wildcard(filename:join(PrivDir, "*")), - filename:extension(ConfigFile) =:= ".config" - ]. + %% get config files from command line + [F || {config, F} <- init:get_arguments()]. -spec gen_output_dir(RandomName :: string()) -> file:filename(). gen_output_dir(RandomName) -> diff --git a/prelude/erlang/common_test/test_exec/src/ct_daemon_printer.erl b/prelude/erlang/common_test/test_exec/src/ct_daemon_printer.erl index dc8f945d9c6..b20b3efbfdf 100644 --- a/prelude/erlang/common_test/test_exec/src/ct_daemon_printer.erl +++ b/prelude/erlang/common_test/test_exec/src/ct_daemon_printer.erl @@ -80,7 +80,7 @@ print_skip_error(Name, Where, Type, Reason, Stacktrace) -> skip. 
print_skip_location({_, GroupOrSuite}) -> - case re:match(atom_to_list(GroupOrSuite), "SUITE$") of + case re:run(atom_to_list(GroupOrSuite), "SUITE$") of nomatch -> io_lib:format("init_per_group of ~s", [GroupOrSuite]); _ -> "init_per_suite" end; diff --git a/prelude/erlang/common_test/test_exec/src/ct_daemon_runner.erl b/prelude/erlang/common_test/test_exec/src/ct_daemon_runner.erl index 6f583ff8c06..831dbfdb48e 100644 --- a/prelude/erlang/common_test/test_exec/src/ct_daemon_runner.erl +++ b/prelude/erlang/common_test/test_exec/src/ct_daemon_runner.erl @@ -113,6 +113,15 @@ handle_call(setup, _From, State) -> handle_call(output_dir, _From, State) -> DaemonOptions = application:get_env(test_exec, daemon_options, []), {reply, proplists:get_value(output_dir, DaemonOptions), State}; +handle_call(priv_dir, _From, State) -> + Response = + case State of + #{setup := #{config := Config}} -> + proplists:get_value(priv_dir, Config); + _ -> + undefined + end, + {reply, Response, State}; handle_call(Request, _From, State) -> {reply, Request, State}. diff --git a/prelude/erlang/common_test/test_exec/src/ct_runner.erl b/prelude/erlang/common_test/test_exec/src/ct_runner.erl index 5240f28b1c6..cd401834320 100644 --- a/prelude/erlang/common_test/test_exec/src/ct_runner.erl +++ b/prelude/erlang/common_test/test_exec/src/ct_runner.erl @@ -210,7 +210,7 @@ common_app_env_args(Env) -> lists:append([["-common", Key, Value] || {Key, Value} <- maps:to_list(Env)]). -spec start_test_node( - Erl :: string(), + Erl :: [binary()], ExtraFlags :: [string()], CodePath :: [file:filename_all()], ConfigFiles :: [file:filename_all()], @@ -236,7 +236,7 @@ start_test_node( ). -spec start_test_node( - Erl :: string(), + Erl :: [binary()], ExtraFlags :: [string()], CodePath :: [file:filename_all()], ConfigFiles :: [file:filename_all()], @@ -254,10 +254,7 @@ start_test_node( ReplayIo ) -> % split of args from Erl which can contain emulator flags - [_Executable | Flags] = string:split(ErlCmd, " ", all), - % we ignore the executable we got, and use the erl command from the - % toolchain that executes this code - ErlExecutable = os:find_executable("erl"), + [ErlExecutable | Flags] = ErlCmd, % HomeDir is the execution directory. HomeDir = set_home_dir(OutputDir), @@ -319,16 +316,48 @@ config_arg(ConfigFiles) -> ["-config"] ++ ConfigFiles. %% Each test execution will have a separate home dir with a %% erlang default cookie file, setting the default cookie to %% buck2-test-runner-cookie --spec set_home_dir(file:filename()) -> file:filename(). +-spec set_home_dir(file:filename_all()) -> file:filename_all(). set_home_dir(OutputDir) -> HomeDir = filename:join(OutputDir, "HOME"), ErlangCookieFile = filename:join(HomeDir, ".erlang.cookie"), ok = filelib:ensure_dir(ErlangCookieFile), ok = file:write_file(ErlangCookieFile, atom_to_list(cookie())), ok = file:change_mode(ErlangCookieFile, 8#00400), + + % In case the system is using dotslash, we leave a symlink to + % the real dotslash cache, otherwise erl could be re-downloaded, etc + try_setup_dotslash_cache(HomeDir), + HomeDir. --spec cookie() -> string(). +-spec try_setup_dotslash_cache(FakeHomeDir :: file:filename_all()) -> ok. 
+try_setup_dotslash_cache(FakeHomeDir) -> + case init:get_argument(home) of + {ok, [[RealHomeDir]]} -> + RealDotslashCacheDir = filename:basedir(user_cache, "dotslash"), + + case filelib:is_file(RealDotslashCacheDir) of + false -> + ok; + true -> + RealHomeDirParts = filename:split(RealHomeDir), + RealDotslashCacheDirParts = filename:split(RealDotslashCacheDir), + + case lists:split(length(RealHomeDirParts), RealDotslashCacheDirParts) of + {RealHomeDirParts, GenDotslashCacheDirParts} -> + FakeHomeDotslashCacheDir = filename:join([FakeHomeDir | GenDotslashCacheDirParts]), + ok = filelib:ensure_path(filename:dirname(FakeHomeDotslashCacheDir)), + ok = file:make_symlink(RealDotslashCacheDir, FakeHomeDotslashCacheDir), + ok; + _ -> + ok + end + end; + _ -> + ok + end. + +-spec cookie() -> atom(). cookie() -> 'buck2-test-runner-cookie'. diff --git a/prelude/erlang/elp.bxl b/prelude/erlang/elp.bxl new file mode 100644 index 00000000000..c82610f4df9 --- /dev/null +++ b/prelude/erlang/elp.bxl @@ -0,0 +1,150 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +# Provide information so that ELP is able to load a BUCK project + +load("@prelude//:paths.bzl", "paths") + +# ------------------ IMPL ------------------ + +def _clean_up_includes(includes): + # - Strip to just dir, not file name + # - Remove duplicates + # Note: Sometimes the buck rule generating the includes has an excludes glob for a directory. + # This flattening will potentially expose excluded files in the directory. + # But we do it, because otherwise the performance in erlang_service parsing is too poor. + include_paths = [_as_path(paths.dirname(p["value"])) for p in includes if p["type"] == "path"] + targets = [t for t in includes if t["type"] == "target"] + return include_paths + targets + +def _get_includes(ctx, includes_target): + return _clean_up_includes([_get_absolute_path(ctx, inc) for inc in includes_target.value()]) + +def _dep_includes(ctx, dep, target_universe): + lookup_val = "{}:{}".format(dep.path, dep.name) + dep_target = target_universe.lookup(lookup_val) + if not dep_target: + return [] + dep_target = dep_target[0] + includes = dep_target.attrs_lazy().get("includes") + + if not includes: + return [] + else: + return _get_includes(ctx, includes) + +def _get_absolute_path(ctx, src) -> dict[str, str]: + """ + Get the absolute path of the thing passed in, which is either an artifact or a target label. 
+ """ + if isinstance(src, ConfiguredProvidersLabel): + return _as_target(str(src.raw_target())) + else: + return _as_path(get_path_without_materialization(src, ctx, abs = True)) + +def _elp_config(ctx): + cells = { + cell: cell_path + for cell, cell_path in ctx.audit().cell(aliases = True).items() + } + + included_targets = ctx.cli_args.included_targets + target_universe = ctx.target_universe(included_targets) + + all = ctx.cquery().kind("^(erlang_app|erlang_test)$", ctx.configured_targets(included_targets)) + if ctx.cli_args.deps_target: + all += ctx.cquery().kind("^erlang_app$", ctx.configured_targets(ctx.cli_args.deps_target)) + result = {} + dep_includes_cache = {} # Cache of includes per dependency + for target in all: + label = target.label + label_name = label.raw_target() + deps = target.attrs_lazy().get("deps") + + includes = target.attrs_lazy().get("includes") + if not includes: + includes = [] + else: + includes = _get_includes(ctx, includes) + + if ctx.cli_args.deps_includes: + if deps: + for dep in deps.value(): + if dep in dep_includes_cache: + dep_includes = dep_includes_cache[dep] + else: + dep_includes = _dep_includes(ctx, dep, target_universe) + dep_includes_cache[dep] = dep_includes + includes = includes + dep_includes + apps = target.attrs_lazy().get("applications") + + if apps: + for app in apps.value(): + includes.append(_as_path(str(_file_node_to_path(cells, app.path)))) + + srcs = target.attrs_lazy().get("srcs") + if not srcs: + srcs = [] + else: + srcs = [_get_absolute_path(ctx, src) for src in srcs.value()] + + suite = target.attrs_lazy().get("suite") + if not suite: + suite = None + elif suite.value() == None: + suite = None + else: + suite_info = _get_absolute_path(ctx, suite.value()) + if suite_info["type"] == "path": + suite = suite_info["value"] + else: + suite = None + + includes = _build_output(includes) + srcs = _build_output(srcs) + result[label_name] = dict( + name = target.attrs_lazy().get("name"), + suite = suite, + srcs = srcs, + includes = includes, + labels = target.attrs_lazy().get("labels"), + ) + ctx.output.print_json(result) + +def _file_node_to_path(cells: dict[str, str], file_node) -> str: + cell, path = str(file_node).split("//", 1) + return paths.join(cells[cell], path) + +def _build_output(items: list[dict[str, str]]) -> list[str]: + # we completely ignore targets, since we don't have support for generated files in ELP + paths = _list_dedupe([p["value"] for p in items if p["type"] == "path"]) + return paths + +def _list_dedupe(xs: list[str]) -> list[str]: + return {x: True for x in xs}.keys() + +def _as_path(src): + return {"type": "path", "value": src} + +def _as_target(src): + return {"type": "target", "value": src} + +# ------------------ INTERFACE ------------------ + +elp_config = bxl_main( + impl = _elp_config, + cli_args = { + "deps_includes": cli_args.bool(False, doc = "Use include paths from the dependencies too."), + "deps_target": cli_args.option(cli_args.string(), doc = "Target to include deps from, if specified. See corresponding field in .elp.toml"), + "included_targets": cli_args.list(cli_args.string(), doc = "Targets to include in the query. See corresponding field in .elp.toml"), + }, +) + +# Run with `buck2 bxl prelude//erlang/elp.bxl:elp_config` +# e.g. +# buck2 bxl prelude//erlang/elp.bxl:elp_config -- --included_targets cell//... +# buck2 bxl prelude//erlang/elp.bxl:elp_config -- --included_targets cell//... 
--deps_includes true diff --git a/prelude/erlang/erlang_application.bzl b/prelude/erlang/erlang_application.bzl index 5c8606ed510..29fac0d865f 100644 --- a/prelude/erlang/erlang_application.bzl +++ b/prelude/erlang/erlang_application.bzl @@ -33,11 +33,8 @@ load( ":erlang_utils.bzl", "action_identifier", "build_paths", - "convert", "multidict_projection", "multidict_projection_key", - "normalise_metadata", - "to_term_args", ) StartDependencySet = transitive_set() @@ -107,7 +104,11 @@ def build_application(ctx, toolchains, dependencies, build_fun) -> list[Provider # generate DefaultInfo and RunInfo providers default_info = _build_default_info(dependencies, primary_app_folder) - run_info = erlang_shell.build_run_info(ctx, dependencies.values(), additional_app_paths = [primary_app_folder]) + run_info = erlang_shell.build_run_info( + ctx, + dependencies = dependencies.values(), + additional_app_paths = [primary_app_folder], + ) return [ default_info, run_info, @@ -240,8 +241,6 @@ def _generate_app_file( NOTE: We are using the .erl files as input to avoid dependencies on beams. """ - tools = toolchain.otp_binaries - _check_application_dependencies(ctx) app_file_name = build_paths.app_file(ctx) @@ -251,22 +250,15 @@ def _generate_app_file( app_file_name, ), ) - script = toolchain.app_file_script app_info_file = _app_info_content(ctx, toolchain, name, srcs, output) app_build_cmd = cmd_args( - [ - tools.escript, - script, - app_info_file, - ], + app_info_file, + hidden = [output.as_output(), srcs] + ([ctx.attrs.app_src] if ctx.attrs.app_src else []), ) - app_build_cmd.hidden(output.as_output()) - app_build_cmd.hidden(srcs) - if ctx.attrs.app_src: - app_build_cmd.hidden(ctx.attrs.app_src) - erlang_build.utils.run_with_env( + erlang_build.utils.run_escript( ctx, toolchain, + toolchain.app_file_script, app_build_cmd, category = "app_resource", identifier = action_identifier(toolchain, paths.basename(app_file_name)), @@ -297,9 +289,7 @@ def _app_info_content( name: str, srcs: list[Artifact], output: Artifact) -> Artifact: - """build an app_info.term file that contains the meta information for building the .app file""" - sources_args = convert(srcs) - sources_args.ignore_artifacts() + """build an app_info.json file that contains the meta information for building the .app file""" data = { "applications": [ app[ErlangAppInfo].name @@ -311,7 +301,7 @@ def _app_info_content( ], "name": name, "output": output, - "sources": sources_args, + "sources": srcs, } if ctx.attrs.version: data["version"] = ctx.attrs.version @@ -320,14 +310,13 @@ def _app_info_content( if ctx.attrs.mod: data["mod"] = ctx.attrs.mod if ctx.attrs.env: - data["env"] = {k: cmd_args(v) for k, v in ctx.attrs.env.items()} + data["env"] = ctx.attrs.env if ctx.attrs.extra_properties: - data["metadata"] = {k: normalise_metadata(v) for k, v in ctx.attrs.extra_properties.items()} + data["metadata"] = ctx.attrs.extra_properties - app_info_content = to_term_args(data) - return ctx.actions.write( - paths.join(erlang_build.utils.build_dir(toolchain), "app_info.term"), - app_info_content, + return ctx.actions.write_json( + paths.join(erlang_build.utils.build_dir(toolchain), "app_info.json"), + data, ) def link_output( diff --git a/prelude/erlang/erlang_build.bzl b/prelude/erlang/erlang_build.bzl index d35dec5f17a..2cedf41d87e 100644 --- a/prelude/erlang/erlang_build.bzl +++ b/prelude/erlang/erlang_build.bzl @@ -169,17 +169,19 @@ def _generate_input_mapping(build_environment: BuildEnvironment, input_artifacts def _generated_source_artifacts(ctx: 
AnalysisContext, toolchain: Toolchain, name: str) -> PathArtifactMapping: """Generate source output artifacts and build actions for generated erl files.""" - inputs = [src for src in ctx.attrs.srcs if _is_xyrl(src)] - outputs = { - module_name(src): _build_xyrl( + + def build(src, custom_include_opt): + return _build_xyrl( ctx, toolchain, src, + custom_include_opt, ctx.actions.declare_output(generated_erl_path(toolchain, name, src)), ) - for src in inputs - } - return outputs + + yrl_outputs = {module_name(src): build(src, "yrl_includefile") for src in ctx.attrs.srcs if _is_yrl(src)} + xrl_outputs = {module_name(src): build(src, "xrl_includefile") for src in ctx.attrs.srcs if _is_xrl(src)} + return yrl_outputs | xrl_outputs def _generate_include_artifacts( ctx: AnalysisContext, @@ -285,8 +287,8 @@ def _generate_beam_artifacts( input_mapping = build_environment.input_mapping, ) - dep_info_content = to_term_args(_build_dep_info_data(updated_build_environment)) - dep_info_file = ctx.actions.write(_dep_info_name(toolchain), dep_info_content) + dep_info_content = _build_dep_info_data(updated_build_environment) + dep_info_file = ctx.actions.write_json(_dep_info_name(toolchain), dep_info_content) for erl in src_artifacts: _build_erl(ctx, toolchain, updated_build_environment, dep_info_file, erl, beam_mapping[module_name(erl)]) @@ -388,6 +390,8 @@ def _get_deps_file(ctx: AnalysisContext, toolchain: Toolchain, src: Artifact) -> "minimal", "-noinput", "-noshell", + "-pa", + toolchain.utility_modules, "-run", "escript", "start", @@ -418,21 +422,19 @@ def _build_xyrl( ctx: AnalysisContext, toolchain: Toolchain, xyrl: Artifact, + custom_include_opt: str, output: Artifact) -> Artifact: """Generate an erl file out of an xrl or yrl input file.""" erlc = toolchain.otp_binaries.erlc - erlc_cmd = cmd_args( - [ - erlc, - "-o", - cmd_args(output.as_output()).parent(), - xyrl, - ], - ) + custom_include = getattr(ctx.attrs, custom_include_opt, None) + cmd = cmd_args(erlc) + if custom_include: + cmd.add("-I", custom_include) + cmd.add("-o", cmd_args(output.as_output(), parent = 1), xyrl) _run_with_env( ctx, toolchain, - erlc_cmd, + cmd, category = "erlc", identifier = action_identifier(toolchain, xyrl.basename), ) @@ -452,25 +454,15 @@ def _build_erl( final_dep_file = ctx.actions.declare_output(_dep_final_name(toolchain, src)) finalize_deps_cmd = cmd_args( - toolchain.otp_binaries.erl, - "+A0", - "+S1:1", - "+sbtu", - "-mode", - "minimal", - "-noinput", - "-noshell", - "-run", - "escript", - "start", - "--", - toolchain.dependency_finalizer, src, dep_info_file, final_dep_file.as_output(), + hidden = build_environment.deps_files.values(), ) - finalize_deps_cmd.hidden(build_environment.deps_files.values()) - ctx.actions.run( + _run_escript( + ctx, + toolchain, + toolchain.dependency_finalizer, finalize_deps_cmd, category = "dependency_finalizer", identifier = action_identifier(toolchain, src.basename), @@ -488,12 +480,14 @@ def _build_erl( _dependency_code_paths(build_environment), ), "-o", - cmd_args(outputs[output].as_output()).parent(), + cmd_args(outputs[output].as_output(), parent = 1), src, ], ) - erlc_cmd, mapping = _add_dependencies_to_args(artifacts, final_dep_file, erlc_cmd, build_environment) - erlc_cmd = _add_full_dependencies(erlc_cmd, build_environment) + deps_args, mapping = _dependencies_to_args(artifacts, final_dep_file, build_environment) + erlc_cmd.add(deps_args) + full_deps_args = _full_dependencies(build_environment) + erlc_cmd.add(full_deps_args) _run_with_env( ctx, toolchain, @@ -504,7 +498,7 
@@ def _build_erl( always_print_stderr = True, ) - ctx.actions.dynamic_output(dynamic = [final_dep_file], inputs = [src], outputs = [output], f = dynamic_lambda) + ctx.actions.dynamic_output(dynamic = [final_dep_file], inputs = [src], outputs = [output.as_output()], f = dynamic_lambda) return None def _build_edoc( @@ -528,7 +522,7 @@ def _build_edoc( "-pa", toolchain.utility_modules, "-o", - cmd_args(output.as_output()).parent(2), + cmd_args(output.as_output(), parent = 2), ], ) @@ -538,11 +532,14 @@ def _build_edoc( args = _erlc_dependency_args(_dependency_include_dirs(build_environment), [], False) eval_cmd.add(args) + eval_cmd_hidden = [] for include in build_environment.includes.values(): - eval_cmd.hidden(include) + eval_cmd_hidden.append(include) for include in build_environment.private_includes.values(): - eval_cmd.hidden(include) + eval_cmd_hidden.append(include) + + eval_cmd.add(cmd_args(hidden = eval_cmd_hidden)) _run_with_env( ctx, @@ -554,13 +551,14 @@ def _build_edoc( ) return None -def _add_dependencies_to_args( +def _dependencies_to_args( artifacts, final_dep_file: Artifact, - args: cmd_args, build_environment: BuildEnvironment) -> (cmd_args, dict[str, (bool, [str, Artifact])]): """Add the transitive closure of all per-file Erlang dependencies as specified in the deps files to the `args` with .hidden. """ + args_hidden = [] + input_mapping = {} deps = artifacts[final_dep_file].read_json() @@ -612,30 +610,31 @@ def _add_dependencies_to_args( else: fail("unrecognized dependency type %s", (dep["type"])) - args.hidden(artifact) + args_hidden.append(artifact) - return args, input_mapping + return cmd_args(hidden = args_hidden), input_mapping -def _add_full_dependencies(erlc_cmd: cmd_args, build_environment: BuildEnvironment) -> cmd_args: +def _full_dependencies(build_environment: BuildEnvironment) -> cmd_args: + erlc_cmd_hidden = [] for artifact in build_environment.full_dependencies: - erlc_cmd.hidden(artifact) - return erlc_cmd + erlc_cmd_hidden.append(artifact) + return cmd_args(hidden = erlc_cmd_hidden) def _dependency_include_dirs(build_environment: BuildEnvironment) -> list[cmd_args]: includes = [ - cmd_args(include_dir_anchor).parent() + cmd_args(include_dir_anchor, parent = 1) for include_dir_anchor in build_environment.private_include_dir ] for include_dir_anchor in build_environment.include_dirs.values(): - includes.append(cmd_args(include_dir_anchor).parent(3)) - includes.append(cmd_args(include_dir_anchor).parent()) + includes.append(cmd_args(include_dir_anchor, parent = 3)) + includes.append(cmd_args(include_dir_anchor, parent = 1)) return includes def _dependency_code_paths(build_environment: BuildEnvironment) -> list[cmd_args]: return [ - cmd_args(ebin_dir_anchor).parent() + cmd_args(ebin_dir_anchor, parent = 1) for ebin_dir_anchor in build_environment.ebin_dirs.values() ] @@ -648,7 +647,7 @@ def _erlc_dependency_args( # A: the whole string would get passed as a single argument, as if it was quoted in CLI e.g. '-I include_path' # ...which the escript cannot parse, as it expects two separate arguments, e.g. 
'-I' 'include_path' - args = cmd_args([]) + args = cmd_args([], ignore_artifacts = True) # build -I options if path_in_arg: @@ -668,8 +667,6 @@ def _erlc_dependency_args( args.add("-pa") args.add(code_path) - args.ignore_artifacts() - return args def _get_erl_opts( @@ -705,9 +702,9 @@ def _get_erl_opts( for parse_transform, (beam, resource_folder) in parse_transforms.items(): args.add( "+{parse_transform, %s}" % (parse_transform,), - cmd_args(beam, format = "-pa{}").parent(), + cmd_args(beam, format = "-pa{}", parent = 1), ) - args.hidden(resource_folder) + args.add(cmd_args(hidden = resource_folder)) # add relevant compile_info manually args.add(cmd_args( @@ -757,9 +754,13 @@ def _is_erl(in_file: Artifact) -> bool: """ Returns True if the artifact is an erl file """ return _is_ext(in_file, [".erl"]) -def _is_xyrl(in_file: Artifact) -> bool: - """ Returns True if the artifact is a xrl or yrl file """ - return _is_ext(in_file, [".yrl", ".xrl"]) +def _is_yrl(in_file: Artifact) -> bool: + """ Returns True if the artifact is a yrl file """ + return _is_ext(in_file, [".yrl"]) + +def _is_xrl(in_file: Artifact) -> bool: + """ Returns True if the artifact is a xrl file """ + return _is_ext(in_file, [".xrl"]) def _is_ext(in_file: Artifact, extensions: list[str]) -> bool: """ Returns True if the artifact has an extension listed in extensions """ @@ -826,6 +827,28 @@ def _run_with_env(ctx: AnalysisContext, toolchain: Toolchain, *args, **kwargs): kwargs["env"] = env ctx.actions.run(*args, **kwargs) +def _run_escript(ctx: AnalysisContext, toolchain: Toolchain, script: Artifact, args: cmd_args, **kwargs) -> None: + """ run escript with env and providing toolchain-configured utility modules""" + cmd = cmd_args([ + toolchain.otp_binaries.erl, + "+A0", + "+S1:1", + "+sbtu", + "-mode", + "minimal", + "-noinput", + "-noshell", + "-pa", + toolchain.utility_modules, + "-run", + "escript", + "start", + "--", + script, + args, + ]) + _run_with_env(ctx, toolchain, cmd, **kwargs) + def _peek_private_includes( ctx: AnalysisContext, toolchain: Toolchain, @@ -877,12 +900,14 @@ erlang_build = struct( utils = struct( is_hrl = _is_hrl, is_erl = _is_erl, - is_xyrl = _is_xyrl, + is_yrl = _is_yrl, + is_xrl = _is_xrl, module_name = module_name, private_include_name = private_include_name, make_dir_anchor = _make_dir_anchor, build_dir = _build_dir, run_with_env = _run_with_env, + run_escript = _run_escript, peek_private_includes = _peek_private_includes, ), ) diff --git a/prelude/erlang/erlang_escript.bzl b/prelude/erlang/erlang_escript.bzl index ef3cf834d88..73791965263 100644 --- a/prelude/erlang/erlang_escript.bzl +++ b/prelude/erlang/erlang_escript.bzl @@ -7,8 +7,9 @@ load("@prelude//:paths.bzl", "paths") load(":erlang_build.bzl", "erlang_build") -load(":erlang_dependencies.bzl", "check_dependencies", "flatten_dependencies") +load(":erlang_dependencies.bzl", "ErlAppDependencies", "check_dependencies", "flatten_dependencies") load(":erlang_info.bzl", "ErlangAppInfo") +load(":erlang_release.bzl", "build_lib_dir") load( ":erlang_toolchain.bzl", "Toolchain", # @unused Used as type @@ -17,43 +18,60 @@ load( ) load(":erlang_utils.bzl", "action_identifier", "to_term_args") -def create_escript( - ctx: AnalysisContext, - spec_file: Artifact, - toolchain: Toolchain, - files: list[Artifact], - output: Artifact, - escript_name: str) -> None: - """ build the escript with the escript builder tool - """ - script = toolchain.escript_builder - - escript_build_cmd = cmd_args( - [ - toolchain.otp_binaries.escript, - script, - spec_file, - 
], - ) - escript_build_cmd.hidden(output.as_output()) - escript_build_cmd.hidden(files) - erlang_build.utils.run_with_env( - ctx, - toolchain, - escript_build_cmd, - category = "escript", - identifier = action_identifier(toolchain, escript_name), - ) - return None - def erlang_escript_impl(ctx: AnalysisContext) -> list[Provider]: # select the correct tools from the toolchain - toolchain_name = get_primary(ctx) toolchain = select_toolchains(ctx)[get_primary(ctx)] # collect all dependencies dependencies = flatten_dependencies(ctx, check_dependencies(ctx.attrs.deps, [ErlangAppInfo])) + if ctx.attrs.bundled: + return _bundled_escript_impl(ctx, dependencies, toolchain) + else: + return _unbundled_escript_impl(ctx, dependencies, toolchain) + +def _unbundled_escript_impl(ctx: AnalysisContext, dependencies: ErlAppDependencies, toolchain: Toolchain) -> list[Provider]: + if ctx.attrs.resources: + fail("resources are not supported with unbundled escripts, add them to an applications priv/ directory instead") + + escript_name = _escript_name(ctx) + + lib_dir = build_lib_dir( + ctx, + toolchain, + escript_name, + dependencies, + ) + + config_files = _escript_config_files(ctx) + escript_trampoline = build_escript_unbundled_trampoline(ctx, toolchain, config_files) + + trampoline = { + "run.escript": escript_trampoline, + } + + all_outputs = {} + for outputs in [lib_dir, trampoline]: + all_outputs.update(outputs) + + for config_file in config_files: + all_outputs[config_file.short_path] = config_file + + output = ctx.actions.symlinked_dir( + escript_name, + all_outputs, + ) + + cmd = cmd_args([ + toolchain.escript_trampoline, + output, + toolchain.otp_binaries.escript, + ]) + + return [DefaultInfo(default_output = output), RunInfo(cmd)] + +def _bundled_escript_impl(ctx: AnalysisContext, dependencies: ErlAppDependencies, toolchain: Toolchain) -> list[Provider]: + toolchain_name = get_primary(ctx) artifacts = {} for dep in dependencies.values(): @@ -81,15 +99,23 @@ def erlang_escript_impl(ctx: AnalysisContext) -> list[Provider]: fail("multiple artifacts defined for path %s", (artifact.short_path)) artifacts[artifact.short_path] = artifact - if ctx.attrs.script_name: - escript_name = ctx.attrs.script_name - else: - escript_name = ctx.attrs.name + ".escript" + # magic tag to make vendored json available to the test binary + if "erlang_test_runner" in ctx.attrs.labels: + artifacts["utility_modules/ebin"] = toolchain.utility_modules + + escript_name = _escript_name(ctx) output = ctx.actions.declare_output(escript_name) args = ctx.attrs.emu_args - if ctx.attrs.main_module: - args += ["-escript", "main", ctx.attrs.main_module] + + config_files = _escript_config_files(ctx) + for config_file in config_files: + artifacts[config_file.short_path] = config_file + + escript_trampoline = build_escript_bundled_trampoline(ctx, toolchain, config_files) + artifacts[escript_trampoline.basename] = escript_trampoline + + args += ["-escript", "main", "erlang_escript_trampoline"] escript_build_spec = { "artifacts": artifacts, @@ -116,8 +142,136 @@ def erlang_escript_impl(ctx: AnalysisContext) -> list[Provider]: RunInfo(escript_cmd), ] +def create_escript( + ctx: AnalysisContext, + spec_file: Artifact, + toolchain: Toolchain, + files: list[Artifact], + output: Artifact, + escript_name: str) -> None: + """ build the escript with the escript builder tool + """ + script = toolchain.escript_builder + + escript_build_cmd = cmd_args( + [ + toolchain.otp_binaries.escript, + script, + spec_file, + ], + hidden = [ + output.as_output(), + 
files, + ], + ) + + erlang_build.utils.run_with_env( + ctx, + toolchain, + escript_build_cmd, + category = "escript", + identifier = action_identifier(toolchain, escript_name), + ) + return None + +def _escript_name(ctx: AnalysisContext) -> str: + if ctx.attrs.script_name: + return ctx.attrs.script_name + else: + return ctx.attrs.name + ".escript" + +def _main_module(ctx: AnalysisContext) -> str: + if ctx.attrs.main_module: + return ctx.attrs.main_module + else: + return ctx.attrs.name + +def build_escript_unbundled_trampoline(ctx: AnalysisContext, toolchain, config_files: list[Artifact]) -> Artifact: + data = cmd_args() + + data.add("#!/usr/bin/env escript") + data.add("%% -*- erlang -*-") + data.add("%%! {}".format(" ".join(ctx.attrs.emu_args))) + + data.add("-module('{}').".format(_escript_name(ctx))) + data.add("-export([main/1]).") + data.add("main(Args) ->") + data.add("EscriptDir = filename:dirname(escript:script_name()),") + data.add(_config_files_code_to_erl(config_files)) + data.add(' EBinDirs = filelib:wildcard(filename:join([EscriptDir, "lib", "*", "ebin"])),') + data.add(" code:add_paths(EBinDirs),") + data.add(" {}:main(Args).".format(_main_module(ctx))) + data.add(_parse_bin()) + + return ctx.actions.write( + paths.join(erlang_build.utils.build_dir(toolchain), "run.escript"), + data, + is_executable = True, + ) + +def build_escript_bundled_trampoline(ctx: AnalysisContext, toolchain, config_files: list[Artifact]) -> Artifact: + data = cmd_args() + + data.add("-module('erlang_escript_trampoline').") + data.add("-export([main/1]).") + data.add("main(Args) ->") + data.add("EscriptDir = escript:script_name(),") + data.add(_config_files_code_to_erl(config_files)) + data.add(" {}:main(Args).".format(_main_module(ctx))) + data.add(_parse_bin()) + escript_trampoline_erl = ctx.actions.write( + paths.join(erlang_build.utils.build_dir(toolchain), "erlang_escript_trampoline.erl"), + data, + ) + my_output = ctx.actions.declare_output("erlang_escript_trampoline.beam") + + ctx.actions.run( + cmd_args( + toolchain.otp_binaries.erlc, + "-o", + cmd_args(my_output.as_output(), parent = 1), + escript_trampoline_erl, + ), + category = "erlc_escript_trampoline", + ) + + return my_output + def _ebin_path(file: Artifact, app_name: str) -> str: return paths.join(app_name, "ebin", file.basename) def _priv_path(app_name: str) -> str: return paths.join(app_name, "priv") + +def _escript_config_files(ctx: AnalysisContext) -> list[Artifact]: + config_files = [] + for config_dep in ctx.attrs.configs: + for artifact in config_dep[DefaultInfo].default_outputs + config_dep[DefaultInfo].other_outputs: + (_, ext) = paths.split_extension(artifact.short_path) + if ext == ".config": + config_files.append(artifact) + return config_files + +def _config_files_code_to_erl(config_files: list[Artifact]) -> list[str]: + cmd = [] + cmd.append("ConfigFiles = [") + for i in range(0, len(config_files)): + cmd.append('"{}"'.format(config_files[i].short_path)) + if i < len(config_files) - 1: + cmd.append(",") + cmd.append("],") + cmd.append("[begin ") + cmd.append("{ok, AppConfigBin, _FullName} = erl_prim_loader:get_file(filename:join(EscriptDir, ConfigFile)),") + cmd.append("{ok, AppConfig} = parse_bin(AppConfigBin), ") + cmd.append(" ok = application:set_env(AppConfig, [{persistent, true}])") + cmd.append("end || ConfigFile <- ConfigFiles],") + return cmd + +def _parse_bin() -> str: + return """ +parse_bin(<<"">>) -> + []; +parse_bin(Bin) -> + {ok, Tokens, _} = erl_scan:string(binary_to_list(Bin)), + 
erl_parse:parse_term(Tokens). + """ diff --git a/prelude/erlang/erlang_info.bzl b/prelude/erlang/erlang_info.bzl index eb4c3d085dc..f1fab0250f1 100644 --- a/prelude/erlang/erlang_info.bzl +++ b/prelude/erlang/erlang_info.bzl @@ -91,6 +91,7 @@ ErlangToolchainInfo = provider( "dependency_finalizer": provider_field(typing.Any, default = None), # trampoline rerouting stdout to stderr "erlc_trampoline": provider_field(typing.Any, default = None), + "escript_trampoline": provider_field(typing.Any, default = None), # name to parse_transform artifacts mapping for core parse_transforms (that are always used) and # user defines ones "core_parse_transforms": provider_field(typing.Any, default = None), diff --git a/prelude/erlang/erlang_release.bzl b/prelude/erlang/erlang_release.bzl index 1da7d2c3ef8..d50ae8f2ff8 100644 --- a/prelude/erlang/erlang_release.bzl +++ b/prelude/erlang/erlang_release.bzl @@ -75,7 +75,7 @@ def _build_primary_release(ctx: AnalysisContext, apps: ErlAppDependencies) -> li def _build_release(ctx: AnalysisContext, toolchain: Toolchain, apps: ErlAppDependencies) -> dict[str, Artifact]: # OTP base structure - lib_dir = _build_lib_dir(ctx, toolchain, apps) + lib_dir = build_lib_dir(ctx, toolchain, _relname(ctx), apps) boot_scripts = _build_boot_script(ctx, toolchain, lib_dir["lib"]) # release specific variables in bin/release_variables @@ -100,12 +100,15 @@ def _build_release(ctx: AnalysisContext, toolchain: Toolchain, apps: ErlAppDepen return all_outputs -def _build_lib_dir(ctx: AnalysisContext, toolchain: Toolchain, all_apps: ErlAppDependencies) -> dict[str, Artifact]: +def build_lib_dir( + ctx: AnalysisContext, + toolchain: Toolchain, + release_name: str, + all_apps: ErlAppDependencies) -> dict[str, Artifact]: """Build lib dir according to OTP specifications. .. 
seealso:: `OTP Design Principles Release Structure `_ """ - release_name = _relname(ctx) build_dir = erlang_build.utils.build_dir(toolchain) link_spec = { @@ -195,12 +198,14 @@ def _build_boot_script( toolchain.otp_binaries.escript, script, spec_file, - cmd_args(release_resource.as_output()).parent(), + cmd_args(release_resource.as_output(), parent = 1), + ], + hidden = [ + start_script.as_output(), + boot_script.as_output(), + lib_dir, ], ) - boot_script_build_cmd.hidden(start_script.as_output()) - boot_script_build_cmd.hidden(boot_script.as_output()) - boot_script_build_cmd.hidden(lib_dir) erlang_build.utils.run_with_env( ctx, diff --git a/prelude/erlang/erlang_shell.bzl b/prelude/erlang/erlang_shell.bzl index 26e84282383..671a37b5cf2 100644 --- a/prelude/erlang/erlang_shell.bzl +++ b/prelude/erlang/erlang_shell.bzl @@ -8,18 +8,20 @@ load("@prelude//:paths.bzl", "paths") load(":erlang_dependencies.bzl", "check_dependencies", "flatten_dependencies") load(":erlang_info.bzl", "ErlangAppInfo") -load(":erlang_toolchain.bzl", "get_primary_tools") +load(":erlang_toolchain.bzl", "get_primary", "get_primary_tools") def _build_run_info( ctx: AnalysisContext, + *, dependencies: list[Dependency], additional_app_paths: list[Artifact] = [], additional_paths: list[Artifact] = [], additional_args: list[cmd_args] = []) -> Provider: """Builds an Erlang shell with the dependencies and additional code paths available.""" + primary_toolchain_name = get_primary(ctx) app_paths = [ - dep[ErlangAppInfo].app_folder + dep[ErlangAppInfo].app_folders[primary_toolchain_name] for dep in dependencies if ErlangAppInfo in dep and not dep[ErlangAppInfo].virtual ] + additional_app_paths @@ -29,18 +31,19 @@ def _build_run_info( for dep in all_shell_dependencies.values(): if dep[ErlangAppInfo].virtual: continue - app_paths.append(dep[ErlangAppInfo].app_folder) + app_paths.append(dep[ErlangAppInfo].app_folders[primary_toolchain_name]) erl_args = cmd_args([]) for app_path in app_paths: - erl_args.add(cmd_args(app_path, format = "-pa \"${REPO_ROOT}\"/{}/ebin \\", delimiter = "")) + erl_args.add(cmd_args(app_path, format = "-pa \"${REPO_ROOT}\"/{}/ebin", delimiter = "")) for additional_path in additional_paths: - erl_args.add(cmd_args(additional_path, format = "-pa \"${REPO_ROOT}\"/{} \\", delimiter = "")) + erl_args.add(cmd_args(additional_path, format = "-pa \"${REPO_ROOT}\"/{}", delimiter = "")) # add configs + config_files = _shell_config_files(ctx) for config_file in _shell_config_files(ctx): - erl_args.add(cmd_args(config_file, format = "-config \"${REPO_ROOT}\"/{} \\", delimiter = "")) + erl_args.add(cmd_args(config_file, format = "-config \"${REPO_ROOT}\"/{}", delimiter = "")) # add extra args for additional_args in additional_args: @@ -49,18 +52,24 @@ def _build_run_info( erl_args.add('"$@"') tools = get_primary_tools(ctx) - content = cmd_args([]) - content = content.add("REPO_ROOT=$(buck2 root --kind=project)") - content.add(cmd_args(["\"${REPO_ROOT}\"/", cmd_args(tools.erl, delimiter = " "), " \\"], delimiter = "")) - content.add(erl_args) - content.add("") + erl_command = cmd_args([ + "exec", + cmd_args(["\"${REPO_ROOT}\"/", cmd_args(tools.erl, delimiter = " ")], delimiter = ""), + erl_args, + ]) - shell_script = ctx.actions.write("start_shell.sh", content) - shell_cmd = cmd_args(["/usr/bin/env", "bash", shell_script]) + start_shell_content = cmd_args([ + "export REPO_ROOT=$(buck2 root --kind=project)", + cmd_args(erl_command, delimiter = " \\\n"), + "", + ]) - # depend on input paths - for code_path in app_paths + 
additional_paths: - shell_cmd.hidden(code_path) + shell_script = ctx.actions.write("start_shell.sh", start_shell_content, with_inputs = True) + shell_cmd = cmd_args( + ["/usr/bin/env", "bash", shell_script], + # depend on input paths + hidden = app_paths + additional_paths + config_files, + ) return RunInfo(shell_cmd) diff --git a/prelude/erlang/erlang_tests.bzl b/prelude/erlang/erlang_tests.bzl index 15a4bf797f0..2ec15a246b7 100644 --- a/prelude/erlang/erlang_tests.bzl +++ b/prelude/erlang/erlang_tests.bzl @@ -24,6 +24,7 @@ load(":erlang_shell.bzl", "erlang_shell") load( ":erlang_toolchain.bzl", "get_primary", + "get_primary_tools", "select_toolchains", ) load( @@ -31,7 +32,6 @@ load( "file_mapping", "list_dedupe", "preserve_structure", - "to_term_args", ) def erlang_tests_macro( @@ -43,7 +43,8 @@ def erlang_tests_macro( property_tests: list[str] = [], srcs: list[str] = [], prefix: str | None = None, - **common_attributes: dict) -> None: + generated_app_labels: list[str] = [], + **common_attributes) -> None: """ Generate multiple erlang_test targets based on the `suites` field. Also adds the default 'config' and 'deps' from the buck2 config. @@ -64,7 +65,7 @@ def erlang_tests_macro( erlang_app_rule( name = srcs_app, srcs = srcs, - labels = ["generated", "test_application", "test_utils"], + labels = generated_app_labels, applications = app_deps, ) deps.append(":" + srcs_app) @@ -77,7 +78,7 @@ def erlang_tests_macro( if prop_target: property_tests = [prop_target] - common_attributes["labels"] = common_attributes.get("labels", []) + ["tpx-enable-artifact-reporting", "test-framework=39:erlang_common_test"] + common_attributes["labels"] = common_attributes.get("labels", []) common_attributes["labels"] = list_dedupe(common_attributes["labels"]) @@ -112,6 +113,7 @@ def erlang_test_impl(ctx: AnalysisContext) -> list[Provider]: toolchains = select_toolchains(ctx) primary_toolchain_name = get_primary(ctx) primary_toolchain = toolchains[primary_toolchain_name] + tools = get_primary_tools(ctx) deps = ctx.attrs.deps + [ctx.attrs._test_binary_lib] @@ -153,15 +155,39 @@ def erlang_test_impl(ctx: AnalysisContext) -> list[Provider]: # Config files for ct config_files = [config_file[DefaultInfo].default_outputs[0] for config_file in ctx.attrs.config_files] - test_binary = ctx.attrs._test_binary[DefaultInfo].default_outputs + trampolines = ctx.attrs._trampolines + if ctx.attrs._trampoline != None: + if trampolines != None: + fail("_trampoline and _trampolines can't be both provided") + trampolines = [ctx.attrs._trampoline] - trampoline = ctx.attrs._trampoline cmd = cmd_args([]) - if trampoline: - cmd.add(trampoline[RunInfo]) - - cmd.add(primary_toolchain.otp_binaries.escript) - cmd.add(test_binary) + if trampolines: + cmd.add(*[trampoline[RunInfo] for trampoline in trampolines]) + + binary_lib_deps = flatten_dependencies(ctx, check_dependencies([ctx.attrs._test_binary_lib], [ErlangAppInfo])) + cmd.add([ + tools.erl, + "-mode", + "minimal", + "-noinput", + "-noshell", + "+A0", + "+S1:1", + "+sbtu", + "-run", + "test_binary", # provided by ctx.attr._test_binary_lib + "main", + ]) + + for dep in binary_lib_deps.values(): + if dep[ErlangAppInfo].virtual: + continue + app_folder = dep[ErlangAppInfo].app_folders[primary_toolchain_name] + cmd.add(["-pa", cmd_args(app_folder, format = "{}/ebin", delimiter = "")]) + cmd.add(["-pa", primary_toolchain.utility_modules]) + + cmd.add(["--"]) suite = ctx.attrs.suite suite_name = module_name(suite) @@ -183,6 +209,7 @@ def erlang_test_impl(ctx: AnalysisContext) -> 
list[Provider]: output_dir = link_output(ctx, suite_name, build_environment, data_dir, property_dir) test_info_file = _write_test_info_file( ctx = ctx, + extra_code_paths = [primary_toolchain.utility_modules], test_suite = suite_name, dependencies = dependencies, test_dir = output_dir, @@ -191,23 +218,34 @@ def erlang_test_impl(ctx: AnalysisContext) -> list[Provider]: ) cmd.add(test_info_file) - default_info = _build_default_info(dependencies, output_dir) + hidden_args = [] + + default_info = _build_default_info(ctx, dependencies, output_dir) for output_artifact in default_info.other_outputs: - cmd.hidden(output_artifact) + hidden_args.append(output_artifact) for config_file in config_files: - cmd.hidden(config_file) + hidden_args.append(config_file) - cmd.hidden(output_dir) + hidden_args.append(primary_toolchain.utility_modules) + hidden_args.append(output_dir) + cmd.add(cmd_args(hidden = hidden_args)) # prepare shell dependencies - additional_paths = [ + additional_shell_paths = [ dep[ErlangTestInfo].output_dir for dep in dependencies.values() if ErlangTestInfo in dep - ] + [output_dir] + ] + [primary_toolchain.utility_modules, output_dir] + + # NB. We can't use `quote="shell"` since we need $REPO_ROOT to be expanded by the shell. + # So we wrap everything in extra double-quotes to protect from spaces in the path + test_info_file_arg = cmd_args(test_info_file, format = '"<<\\"${REPO_ROOT}/{}\\">>"') - preamble = '-eval "%s" \\' % (ctx.attrs.preamble) - additional_args = [cmd_args(preamble, "-noshell \\")] + additional_shell_args = cmd_args([ + cmd_args(["-test_cli_lib", "test_info_file", test_info_file_arg], delimiter = " "), + cmd_args("-eval", ctx.attrs.preamble, quote = "shell", delimiter = " "), + "-noshell", + ]) all_direct_shell_dependencies = check_dependencies([ctx.attrs._cli_lib], [ErlangAppInfo]) cli_lib_deps = flatten_dependencies(ctx, all_direct_shell_dependencies) @@ -217,9 +255,9 @@ def erlang_test_impl(ctx: AnalysisContext) -> list[Provider]: run_info = erlang_shell.build_run_info( ctx, - shell_deps.values(), - additional_paths = additional_paths, - additional_args = additional_args, + dependencies = shell_deps.values(), + additional_paths = additional_shell_paths, + additional_args = [additional_shell_args], ) re_executor = get_re_executor_from_props(ctx) @@ -231,7 +269,7 @@ def erlang_test_impl(ctx: AnalysisContext) -> list[Provider]: type = "erlang_test", command = [cmd], env = ctx.attrs.env, - labels = ["tpx-fb-test-type=16"] + ctx.attrs.labels, + labels = ctx.attrs.labels, contacts = ctx.attrs.contacts, run_from_project_root = True, use_project_relative_paths = True, @@ -245,13 +283,14 @@ def erlang_test_impl(ctx: AnalysisContext) -> list[Provider]: ] # Copied from erlang_application. 
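An editorial note on a pattern that recurs throughout the Starlark hunks in this diff, including the test-rule changes above: `cmd_args` no longer supports post-construction mutation, so `.hidden(...)`, `.parent(...)` and `.ignore_artifacts()` calls are replaced by constructor arguments. A minimal before/after sketch (the helper name and its parameters are illustrative, not from this diff):

```python
# Old style: mutate after construction (removed API).
#   cmd = cmd_args([script, spec_file])
#   cmd.hidden(output.as_output())
#   cmd.hidden(files)
#   dir_arg = cmd_args(output.as_output()).parent()

# New style: declare everything up front. `hidden` tracks artifacts the
# action depends on without rendering them on the command line.
def _escript_cmd(script, spec_file, output, files):
    return cmd_args(
        [script, spec_file],
        hidden = [output.as_output(), files],  # tracked, not rendered
    )

# Likewise, in the hunks above:
#   cmd_args(out.as_output()).parent()  ->  cmd_args(out.as_output(), parent = 1)
#   args.ignore_artifacts()             ->  cmd_args(..., ignore_artifacts = True)
```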
-def _build_default_info(dependencies: ErlAppDependencies, output_dir: Artifact) -> Provider: +def _build_default_info(ctx: AnalysisContext, dependencies: ErlAppDependencies, output_dir: Artifact) -> Provider: """ generate default_outputs and DefaultInfo provider """ + primary_toolchain_name = get_primary(ctx) outputs = [] for dep in dependencies.values(): if ErlangAppInfo in dep and not dep[ErlangAppInfo].virtual: - outputs.append(dep[ErlangAppInfo].app_folder) + outputs.append(dep[ErlangAppInfo].app_folders[primary_toolchain_name]) if ErlangTestInfo in dep: outputs += dep[DefaultInfo].default_outputs outputs += dep[DefaultInfo].other_outputs @@ -259,18 +298,21 @@ def _build_default_info(dependencies: ErlAppDependencies, output_dir: Artifact) def _write_test_info_file( ctx: AnalysisContext, + extra_code_paths: list[Artifact], test_suite: str, dependencies: ErlAppDependencies, test_dir: Artifact, config_files: list[Artifact], erl_cmd: [cmd_args, Artifact]) -> Artifact: + dependency_paths = _list_code_paths(ctx, dependencies) + dependency_paths.extend(extra_code_paths) tests_info = { "artifact_annotation_mfa": ctx.attrs._artifact_annotation_mfa, "common_app_env": ctx.attrs.common_app_env, "config_files": config_files, "ct_opts": ctx.attrs._ct_opts, - "dependencies": _list_code_paths(dependencies), - "erl_cmd": cmd_args(['"', cmd_args(erl_cmd, delimiter = " "), '"'], delimiter = ""), + "dependencies": dependency_paths, + "erl_cmd": erl_cmd, "extra_ct_hooks": ctx.attrs.extra_ct_hooks, "extra_flags": ctx.attrs.extra_erl_flags, "providers": ctx.attrs._providers, @@ -278,30 +320,28 @@ def _write_test_info_file( "test_suite": test_suite, } test_info_file = ctx.actions.declare_output("tests_info") - ctx.actions.write( - test_info_file, - to_term_args(tests_info), - ) + ctx.actions.write_json(test_info_file, tests_info) return test_info_file -def _list_code_paths(dependencies: ErlAppDependencies) -> list[cmd_args]: +def _list_code_paths(ctx: AnalysisContext, dependencies: ErlAppDependencies) -> list[[Artifact, cmd_args]]: """lists all ebin/ dirs from the test targets dependencies""" + primary_toolchain_name = get_primary(ctx) folders = [] for dependency in dependencies.values(): if ErlangAppInfo in dependency: dep_info = dependency[ErlangAppInfo] - if dep_info.virtual: - continue - folders.append(cmd_args( - dep_info.app_folder, - format = '"{}/ebin"', - )) + if not dep_info.virtual: + folders.append(cmd_args( + dep_info.app_folders[primary_toolchain_name], + format = "{}/ebin", + delimiter = "", + )) elif ErlangTestInfo in dependency: dep_info = dependency[ErlangTestInfo] - folders.append(cmd_args(dep_info.output_dir, format = '"{}"')) + folders.append(dep_info.output_dir) return folders -def _build_resource_dir(ctx, resources: list, target_dir: str) -> Artifact: +def _build_resource_dir(ctx: AnalysisContext, resources: list, target_dir: str) -> Artifact: """ build mapping for suite data directory generating the necessary mapping information for the suite data directory diff --git a/prelude/erlang/erlang_toolchain.bzl b/prelude/erlang/erlang_toolchain.bzl index 94106e76982..b74e5ded533 100644 --- a/prelude/erlang/erlang_toolchain.bzl +++ b/prelude/erlang/erlang_toolchain.bzl @@ -41,6 +41,7 @@ Toolchain = record( dependency_analyzer = field(Artifact), dependency_finalizer = field(Artifact), erlc_trampoline = field(Artifact), + escript_trampoline = field(Artifact), escript_builder = field(Artifact), otp_binaries = field(Tools), release_variables_builder = field(Artifact), @@ -65,6 +66,7 @@ 
ToolchainUtillInfo = provider( "dependency_finalizer": provider_field(typing.Any, default = None), "edoc": provider_field(typing.Any, default = None), "erlc_trampoline": provider_field(typing.Any, default = None), + "escript_trampoline": provider_field(typing.Any, default = None), "escript_builder": provider_field(typing.Any, default = None), "release_variables_builder": provider_field(typing.Any, default = None), "include_erts": provider_field(typing.Any, default = None), @@ -97,6 +99,7 @@ def _multi_version_toolchain_impl(ctx: AnalysisContext) -> list[Provider]: dependency_finalizer = toolchain_info.dependency_finalizer, erl_opts = toolchain_info.erl_opts, erlc_trampoline = toolchain_info.erlc_trampoline, + escript_trampoline = toolchain_info.escript_trampoline, escript_builder = toolchain_info.escript_builder, otp_binaries = toolchain_info.otp_binaries, release_variables_builder = toolchain_info.release_variables_builder, @@ -126,9 +129,6 @@ multi_version_toolchain_rule = rule( is_toolchain_rule = True, ) -def as_target(name: str) -> str: - return ":" + name - def _config_erlang_toolchain_impl(ctx: AnalysisContext) -> list[Provider]: """ rule for erlang toolchain """ @@ -142,10 +142,8 @@ def _config_erlang_toolchain_impl(ctx: AnalysisContext) -> list[Provider]: # get otp binaries binaries_info = ctx.attrs.otp_binaries[ErlangOTPBinariesInfo] erl = cmd_args([binaries_info.erl] + emu_flags) - erlc = cmd_args(binaries_info.erlc) - escript = cmd_args(binaries_info.escript) - erlc.hidden(binaries_info.erl) - escript.hidden(binaries_info.erl) + erlc = cmd_args(binaries_info.erlc, hidden = binaries_info.erl) + escript = cmd_args(binaries_info.escript, hidden = binaries_info.erl) tools_binaries = ToolsBinaries( erl = binaries_info.erl, erlc = binaries_info.erl, @@ -191,6 +189,7 @@ def _config_erlang_toolchain_impl(ctx: AnalysisContext) -> list[Provider]: env = ctx.attrs.env, emu_flags = emu_flags, erlc_trampoline = utils.erlc_trampoline, + escript_trampoline = utils.escript_trampoline, escript_builder = utils.escript_builder, otp_binaries = otp_binaries, release_variables_builder = utils.release_variables_builder, @@ -270,7 +269,7 @@ def _gen_parse_transform_beam( erlc, "+deterministic", "-o", - cmd_args(output.as_output()).parent(), + cmd_args(output.as_output(), parent = 1), src, ]) ctx.actions.run(cmd, category = "erlc", identifier = src.short_path) @@ -307,7 +306,7 @@ def _gen_util_beams( erlc, "+deterministic", "-o", - cmd_args(output.as_output()).parent(), + cmd_args(output.as_output(), parent = 1), src, ], category = "erlc", @@ -362,6 +361,7 @@ def _toolchain_utils(ctx: AnalysisContext) -> list[Provider]: dependency_finalizer = ctx.attrs.dependency_finalizer, edoc = ctx.attrs.edoc, erlc_trampoline = ctx.attrs.erlc_trampoline, + escript_trampoline = ctx.attrs.escript_trampoline, escript_builder = ctx.attrs.escript_builder, release_variables_builder = ctx.attrs.release_variables_builder, include_erts = ctx.attrs.include_erts, @@ -380,32 +380,9 @@ toolchain_utilities = rule( "edoc": attrs.source(), "erlc_trampoline": attrs.source(), "escript_builder": attrs.source(), + "escript_trampoline": attrs.source(), "include_erts": attrs.source(), "release_variables_builder": attrs.source(), "utility_modules": attrs.list(attrs.source()), }, ) - -# Resources that need to be plugged in through toolchain// : -# - jsone - -toolchain_resources = rule( - impl = lambda ctx: [ - DefaultInfo( - sub_targets = { - "jsone": ctx.attrs.jsone.providers, - }, - ), - ], - attrs = { - "jsone": attrs.dep(), - }, - 
is_toolchain_rule = True, -) - -toolchain_resources_internal = rule( - impl = lambda ctx: ctx.attrs._resources.providers, - attrs = { - "_resources": attrs.toolchain_dep(default = "toolchains//:erlang-resources"), - }, -) diff --git a/prelude/erlang/erlang_utils.bzl b/prelude/erlang/erlang_utils.bzl index dcb20b3dfe5..8ac863d6726 100644 --- a/prelude/erlang/erlang_utils.bzl +++ b/prelude/erlang/erlang_utils.bzl @@ -11,22 +11,17 @@ load( "Toolchain", # @unused Used as type ) -def normalise_metadata(data: [str, list[str]]) -> [cmd_args, list[cmd_args]]: - if type(data) == type([]): - return [cmd_args(item) for item in data] - else: - return cmd_args(data) - def to_term_args(data: typing.Any) -> cmd_args: """ convert nested lists/tuple/map data structure to Erlang Term cmd_args """ - args = cmd_args([]) - args.add(cmd_args([ - convert(data), - ".", - ], delimiter = "")) - args.add("") - return args + + return cmd_args( + cmd_args([ + convert(data), + ".", + ], delimiter = ""), + "", + ) # paths def app_file(ctx: AnalysisContext) -> str: @@ -48,7 +43,7 @@ build_paths = struct( linktree = linktree, ) -def convert(data: typing.Any) -> cmd_args: +def convert(data: typing.Any, ignore_artifacts: bool = False) -> cmd_args: """ converts a lists/tuple/map data structure to a sub-term that can be embedded in another to_term_args or convert """ if type(data) == "list": @@ -64,57 +59,50 @@ def convert(data: typing.Any) -> cmd_args: elif type(data) == "bool": return convert_bool(data) - args = cmd_args([]) - args.add(cmd_args(["\"", data, "\""], delimiter = "")) - return args + return cmd_args( + cmd_args(["\"", data, "\""], delimiter = ""), + ignore_artifacts = ignore_artifacts, + ) # internal def convert_list(ls: list, ob: str = "[", cb: str = "]") -> cmd_args: - args = cmd_args([]) - args.add(ob) + args = [] + args.append(ob) if len(ls) >= 1: - args.add(cmd_args([ + args.append(cmd_args([ convert(ls[0]), ], delimiter = "")) for item in ls[1:]: - args.add(cmd_args([ + args.append(cmd_args([ ",", convert(item), ], delimiter = "")) - args.add(cb) - return args + args.append(cb) + return cmd_args(args) def convert_dict(dt: dict) -> cmd_args: - args = cmd_args([]) - args.add("#{") + args = [] + args.append("#{") items = list(dt.items()) if len(items) >= 1: k, v = items[0] - args.add(cmd_args([ + args.append(cmd_args([ convert(k), "=>", convert(v), ], delimiter = "")) for k, v in items[1:]: - args.add(cmd_args([ + args.append(cmd_args([ ",", convert(k), "=>", convert(v), ], delimiter = "")) - args.add("}") - return args - -def convert_args(data: cmd_args) -> cmd_args: - args = cmd_args() - args.add("\"") - args.add(cmd_args(data, delimiter = " ")) - args.add("\"") - return args + args.append("}") + return cmd_args(args) def convert_string(st: str) -> cmd_args: - args = cmd_args() - return args.add(cmd_args(["\"", st.replace("\"", "\\\""), "\""], delimiter = "")) + return cmd_args(cmd_args(["\"", st.replace("\"", "\\\""), "\""], delimiter = "")) def convert_bool(bl: bool) -> cmd_args: if bl: @@ -141,15 +129,6 @@ def action_identifier(toolchain: Toolchain, name: str) -> str: """builds an action identifier parameterized by the toolchain""" return "%s(%s)" % (name, toolchain.name) -def str_to_bool(value: str) -> bool: - """convert string representation of bool to bool""" - if value == "True": - return True - elif value == "False": - return False - else: - fail("{} is not a valid boolean value") - def preserve_structure(path: str) -> dict[str, list[str]]: """Return a mapping from a path that preserves the 
filestructure relative to the path.""" all_files = glob([paths.join(path, "**")]) diff --git a/prelude/erlang/shell/BUCK.v2 b/prelude/erlang/shell/BUCK.v2 index 5f86cd6414e..29aa0288bf6 100644 --- a/prelude/erlang/shell/BUCK.v2 +++ b/prelude/erlang/shell/BUCK.v2 @@ -1,3 +1,9 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + erlang_application( name = "buck2_shell_utils", srcs = glob(["src/*.erl"]), @@ -11,7 +17,6 @@ erlang_application( ], included_applications = [ "prelude//erlang/common_test/test_exec:test_exec", - "prelude//erlang/toolchain:resources[jsone]", ], shell_libs = [], visibility = ["PUBLIC"], diff --git a/prelude/erlang/shell/src/shell_buck2_module_search.erl b/prelude/erlang/shell/src/shell_buck2_module_search.erl new file mode 100644 index 00000000000..2550f8e9756 --- /dev/null +++ b/prelude/erlang/shell/src/shell_buck2_module_search.erl @@ -0,0 +1,96 @@ +%% Copyright (c) Meta Platforms, Inc. and affiliates. +%% +%% This source code is licensed under both the MIT license found in the +%% LICENSE-MIT file in the root directory of this source tree and the Apache +%% License, Version 2.0 found in the LICENSE-APACHE file in the root directory +%% of this source tree. + +%%%------------------------------------------------------------------- +%%% @doc +%%% Configurable hook for module discovery +%%% @end +%%% % @format + +-module(shell_buck2_module_search). + +-export([find_module/1, find_module_source/1]). + +-callback find_module_source(module()) -> + {source, file:filename_all()} + | {error, not_found | {ambiguous, [file:filename_all()]}}. + +-spec find_module(atom()) -> + available + | {source, file:filename_all()} + | {error, not_found | {ambiguous, [file:filename_all()]}}. +find_module(Module) -> + WantedModuleName = atom_to_list(Module), + case + [ + found + || {ModuleName, _, _} <- code:all_available(), + string:equal(WantedModuleName, ModuleName) + ] + of + [found] -> + available; + _ -> + _ = application:load(buck2_shell_utils), + % elp:ignore W0011 (application_get_env) + case application:get_env(buck2_shell_utils, search_module) of + {ok, Mod} -> + Mod:find_module_source(Module); + _ -> + find_module_source(Module) + end + end. + +-spec find_module_source(module()) -> + {source, file:filename_all()} + | {error, not_found | {ambiguous, [file:filename_all()]}}. +find_module_source(Module) -> + Root = shell_buck2_utils:cell_root(), + io:format("use ~s as root", [Root]), + {ok, Output} = shell_buck2_utils:run_command( + "find ~s -type d " + "\\( -path \"~s/_build*\" -path \"~s/erl/_build*\" -o -path ~s/buck-out \\) -prune " + "-o -name '~s.erl' -print", + [Root, Root, Root, Root, Module] + ), + case + [ + RelPath + || RelPath <- [ + string:prefix(Path, [Root, "/"]) + || Path <- string:split(Output, "\n", all) + ], + RelPath =/= nomatch, + string:prefix(RelPath, "buck-out") == nomatch, + string:str(binary_to_list(RelPath), "_build") == 0 + ] + of + [ModulePath] -> + {source, ModulePath}; + [] -> + {error, not_found}; + Candidates -> + %% check if there are actually targets associated + {ok, RawOutput} = shell_buck2_utils:buck2_query( + "owner(\\\"\%s\\\")", "--json", Candidates + ), + SourceTargetMapping = json:decode(RawOutput), + case + maps:fold( + fun + (_Source, [], Acc) -> Acc; + (Source, _, Acc) -> [Source | Acc] + end, + [], + SourceTargetMapping + ) + of + [] -> {error, not_found}; + [Source] -> {source, Source}; + More -> {error, {ambiguous, More}} + end + end. 
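The new shell_buck2_module_search module above extracts the old user_default search logic into a configurable hook: the `search_module` app env of buck2_shell_utils can name a callback implementing `find_module_source/1`, with the filesystem `find` fallback used otherwise. That fallback's candidate filtering is modeled below in Python (root and paths are hypothetical); a single surviving hit becomes `{source, Path}`, and multiple hits are disambiguated via a `buck2` owner query:

```python
def filter_candidates(root, find_output):
    """Model of the list comprehension in find_module_source/1: relativize
    hits under `root`, then drop buck-out and _build results."""
    keep = []
    for path in find_output.split("\n"):
        if not path.startswith(root + "/"):
            continue  # string:prefix(Path, [Root, "/"]) =:= nomatch
        rel = path[len(root) + 1:]
        if rel.startswith("buck-out") or "_build" in rel:
            continue  # mirrors the string:prefix/string:str checks
        keep.append(rel)
    return keep

assert filter_candidates(
    "/repo",
    "/repo/src/foo.erl\n/repo/buck-out/gen/foo.erl\n/repo/app/_build/foo.erl",
) == ["src/foo.erl"]
```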
diff --git a/prelude/erlang/shell/src/shell_buck2_utils.erl b/prelude/erlang/shell/src/shell_buck2_utils.erl index baa88f12d38..488e884d059 100644 --- a/prelude/erlang/shell/src/shell_buck2_utils.erl +++ b/prelude/erlang/shell/src/shell_buck2_utils.erl @@ -17,10 +17,11 @@ %% Public API -export([ project_root/0, + cell_root/0, rebuild_modules/1, buck2_build_targets/1, buck2_query/1, buck2_query/2, buck2_query/3, - run_command/2, + run_command/2, run_command/3, get_additional_paths/1 ]). @@ -28,7 +29,15 @@ -spec project_root() -> file:filename(). project_root() -> - case run_command("buck2 root --kind=project 2>/dev/null", [], [{at_root, false}, {replay, false}]) of + root(project). + +-spec cell_root() -> file:filename(). +cell_root() -> + root(cell). + +-spec root(Type :: cell | project) -> file:filename(). +root(Type) -> + case run_command("buck2 root --kind=~s 2>/dev/null", [Type], [{at_root, false}, {replay, false}]) of {ok, Output} -> Dir = string:trim(Output), case filelib:is_dir(Dir) of @@ -39,20 +48,6 @@ project_root() -> error(failed_to_query_project_root) end. --spec project_cell() -> binary(). -project_cell() -> - ProjectRoot = project_root(), - case run_command("buck2 audit cell --json 2>/dev/null", [], [{replay, false}]) of - {ok, Output} -> - [ProjectCell] = [ - Cell - || {Cell, CellRoot} <- maps:to_list(jsone:decode(Output)), string:equal(ProjectRoot, CellRoot) - ], - ProjectCell; - error -> - error(failed_to_query_project_cell) - end. - -spec rebuild_modules([module()]) -> ok | error. rebuild_modules([]) -> ok; @@ -138,13 +133,10 @@ port_loop(Port, Replay, StdOut) -> -spec get_additional_paths(file:filename_all()) -> [file:filename_all()]. get_additional_paths(Path) -> - PrefixedPath = io_lib:format("~s//~s", [project_cell(), Path]), case run_command( "buck2 bxl --reuse-current-config --console super prelude//erlang/shell/shell.bxl:ebin_paths -- --source ~s", - [ - PrefixedPath - ] + [Path] ) of {ok, Output} -> diff --git a/prelude/erlang/shell/src/user_default.erl b/prelude/erlang/shell/src/user_default.erl index 4637cfd0b7a..5390983a74d 100644 --- a/prelude/erlang/shell/src/user_default.erl +++ b/prelude/erlang/shell/src/user_default.erl @@ -44,80 +44,15 @@ c(Module, _Options, _Filter) -> -spec l(module()) -> code:load_ret(). l(Module) -> - case find_module(Module) of + case shell_buck2_module_search:find_module(Module) of available -> c:l(Module); {source, RelSource} -> - Paths = shell_buck2_utils:get_additional_paths(RelSource), + AbsSource = filename:absname(RelSource), + Paths = shell_buck2_utils:get_additional_paths(AbsSource), ok = code:add_paths(Paths), ok = ct_daemon:push_paths(Paths), c:l(Module); Error -> Error end. - --spec find_module(module()) -> - available - | {source, file:filename_all()} - | {error, not_found | {ambiguous, [file:filename_all()]}}. -find_module(Module) -> - WantedModuleName = atom_to_list(Module), - case - [ - found - || {ModuleName, _, _} <- code:all_available(), - string:equal(WantedModuleName, ModuleName) - ] - of - [found] -> available; - _ -> find_module_source(Module) - end. - --spec find_module_source(module()) -> - {source, file:filename_all()} - | {error, not_found | {ambiguous, [file:filename_all()]}}. 
-find_module_source(Module) -> - Root = shell_buck2_utils:project_root(), - {ok, Output} = shell_buck2_utils:run_command( - "find ~s -type d " - "\\( -path \"~s/_build*\" -path \"~s/erl/_build*\" -o -path ~s/buck-out \\) -prune " - "-o -name '~s.erl' -print", - [Root, Root, Root, Root, Module] - ), - case - [ - RelPath - || RelPath <- [ - string:prefix(Path, [Root, "/"]) - || Path <- string:split(Output, "\n", all) - ], - RelPath =/= nomatch, - string:prefix(RelPath, "buck-out") == nomatch, - string:str(binary_to_list(RelPath), "_build") == 0 - ] - of - [ModulePath] -> - {source, ModulePath}; - [] -> - {error, not_found}; - Candidates -> - %% check if there are actually targets associated - {ok, RawOutput} = shell_buck2_utils:buck2_query( - "owner(\\\"\%s\\\")", "--json", Candidates - ), - SourceTargetMapping = jsone:decode(RawOutput), - case - maps:fold( - fun - (_Source, [], Acc) -> Acc; - (Source, _, Acc) -> [Source | Acc] - end, - [], - SourceTargetMapping - ) - of - [] -> {error, not_found}; - [Source] -> {source, Source}; - More -> {error, {ambiguous, More}} - end - end. diff --git a/prelude/erlang/toolchain/BUCK.v2 b/prelude/erlang/toolchain/BUCK.v2 index 3e0d26ce0bf..33e43e8e403 100644 --- a/prelude/erlang/toolchain/BUCK.v2 +++ b/prelude/erlang/toolchain/BUCK.v2 @@ -1,4 +1,9 @@ -load("@prelude//erlang:erlang_toolchain.bzl", "erlang_parse_transform", "toolchain_resources_internal", "toolchain_utilities") +load("@prelude//erlang:erlang_toolchain.bzl", "erlang_parse_transform", "toolchain_utilities") +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() erlang_parse_transform( name = "transform_project_root", @@ -27,16 +32,14 @@ toolchain_utilities( edoc = "edoc_cli.escript", erlc_trampoline = "erlc_trampoline.sh", escript_builder = "escript_builder.escript", + escript_trampoline = "escript_trampoline.sh", include_erts = "include_erts.escript", release_variables_builder = "release_variables_builder.escript", utility_modules = [ "edoc_doclet_chunks.erl", "edoc_report.erl", + "epp_dodger.erl", + "json.erl", ], visibility = ["PUBLIC"], ) - -toolchain_resources_internal( - name = "resources", - visibility = ["PUBLIC"], -) diff --git a/prelude/erlang/toolchain/app_src_builder.escript b/prelude/erlang/toolchain/app_src_builder.escript index 477a6f9576e..9b3ff831f2c 100644 --- a/prelude/erlang/toolchain/app_src_builder.escript +++ b/prelude/erlang/toolchain/app_src_builder.escript @@ -16,23 +16,22 @@ %%% .app.src file. %%% %%% usage: -%%% app_src_builder.escript app_info.term +%%% app_src_builder.escript app_info.json %%% -%%% app_info.term format: +%%% app_info.json format: %%% -%%% The file must contain only a single term which is a map -%%% with the following spec: +%%% The file must contain only a single JSON map with the following spec: %%% %%% #{ -%%% "name" := , -%%% "output" := , -%%% "sources" := [], -%%% "applications" := [], -%%% "included_applications" := I[], -%%% "template" => , -%%% "version" => , -%%% "env" => [application env variable], -%%% "metadata" => map of metadata +%%% <<"name">> := , +%%% <<"output">> := , +%%% <<"sources">> := [], +%%% <<"applications">> := [], +%%% <<"included_applications">> := I[], +%%% <<"template">> => , +%%% <<"version">> => , +%%% <<"env">> => [application env variable], +%%% <<"metadata">> => map of metadata %%% } %%% %%% @end @@ -58,7 +57,7 @@ main(_) -> -spec usage() -> ok. usage() -> - io:format("app_src_builder.escript app_info.term~n"). 
+ io:format("app_src_builder.escript app_info.json~n"). -spec do(file:filename()) -> ok. do(AppInfoFile) -> @@ -73,7 +72,7 @@ do(AppInfoFile) -> mod := Mod, env := Env, metadata := Metadata - } = do_parse_app_info_file(AppInfoFile), + } = AppInfo = do_parse_app_info_file(AppInfoFile), VerifiedTerms = check_and_normalize_template( Name, Version, @@ -94,36 +93,37 @@ do(AppInfoFile) -> output := file:filename() }. do_parse_app_info_file(AppInfoFile) -> - case file:consult(AppInfoFile) of - {ok, [ - #{ - "name" := Name, - "output" := Output, - "sources" := Sources, - "applications" := Applications, - "included_applications" := IncludedApplications - } = Terms - ]} -> - Template = get_template(maps:get("template", Terms, undefined)), - Mod = get_mod(Name, maps:get("mod", Terms, undefined)), - Env = get_env(maps:get("env", Terms, undefined)), - Metadata = get_metadata(maps:get("metadata", Terms, undefined)), - #{ - name => Name, - sources => Sources, - vsn => maps:get("version", Terms, undefined), - output => Output, - template => Template, - applications => - normalize_application([list_to_atom(App) || App <- Applications]), - included_applications => - [list_to_atom(App) || App <- IncludedApplications], - mod => Mod, - env => Env, - metadata => Metadata - }; - {ok, Terms} -> - file_corrupt_error(AppInfoFile, Terms); + case file:read_file(AppInfoFile) of + {ok, Content} -> + case json:decode(Content) of + #{ + <<"name">> := Name, + <<"output">> := Output, + <<"sources">> := Sources, + <<"applications">> := Applications, + <<"included_applications">> := IncludedApplications + } = Terms -> + Template = get_template(maps:get(<<"template">>, Terms, undefined)), + Mod = get_mod(Name, maps:get(<<"mod">>, Terms, undefined)), + Env = get_env(Name, maps:get(<<"env">>, Terms, undefined)), + Metadata = get_metadata(Name, maps:get(<<"metadata">>, Terms, undefined)), + #{ + name => Name, + sources => Sources, + vsn => maps:get(<<"version">>, Terms, undefined), + output => Output, + template => Template, + applications => + normalize_application([binary_to_atom(App) || App <- Applications]), + included_applications => + [binary_to_atom(App) || App <- IncludedApplications], + mod => Mod, + env => Env, + metadata => Metadata + }; + Terms -> + file_corrupt_error(AppInfoFile, Terms) + end; Error -> open_file_error(AppInfoFile, Error) end. @@ -138,34 +138,50 @@ get_template(TemplateFile) -> Error -> open_file_error(TemplateFile, Error) end. --spec get_mod(string(), {string(), [string()]} | undefined) -> mod(). +-spec get_mod(binary(), [binary() | [binary()]] | undefined) -> mod(). get_mod(_, undefined) -> undefined; -get_mod(AppName, {ModuleName, StringArgs}) -> - ModString = unicode:characters_to_list([ - "{", ModuleName, ",[", lists:join(",", StringArgs), "]}." - ]), +get_mod(AppName, [ModuleName, StringArgs]) -> + parse_term( + AppName, + ["{", ModuleName, ",[", lists:join(",", StringArgs), "]}"], + "mod field" + ). + +-spec parse_term(binary(), iolist(), string()) -> term(). +parse_term(AppName, RawString, ErrorDescription) -> + String = unicode:characters_to_list([RawString | "."]), try - {ok, Tokens, _EndLine} = erl_scan:string(ModString), + {ok, Tokens, _EndLine} = erl_scan:string(String), {ok, Term} = erl_parse:parse_term(Tokens), Term catch - _:_ -> module_filed_error(AppName, ModString) + _:_ -> parse_error(AppName, String, ErrorDescription) end. --spec get_env(map() | undefined) -> [tuple()] | undefined. 
-get_env(undefined) -> undefined; -get_env(Env) -> - [{list_to_atom(K), V} || {K, V} <- maps:to_list(Env)]. - --spec get_metadata(map() | undefined) -> map(). -get_metadata(undefined) -> #{}; -get_metadata(Metadata) -> - maps:from_list([{list_to_atom(K), V} || {K, V} <- maps:to_list(Metadata)]). +-spec get_env(binary(), map() | undefined) -> [tuple()] | undefined. +get_env(_Name, undefined) -> + undefined; +get_env(Name, Env) -> + [ + {binary_to_atom(K), parse_term(Name, V, io_lib:format("env value for ~ts", [K]))} + || K := V <- maps:iterator(Env, ordered) + ]. + +-spec get_metadata(binary(), map() | undefined) -> map(). +get_metadata(_Name, undefined) -> #{}; +get_metadata(Name, Metadata) -> #{binary_to_atom(K) => normalize_metadata_value(Name, K, V) || K := V <- Metadata}. + +-spec normalize_metadata_value(binary(), binary(), binary() | [binary()]) -> atom() | [atom()]. +normalize_metadata_value(AppName, Key, Value) when is_binary(Value) -> + parse_term(AppName, Value, io_lib:format("metadata value for ~ts", [Key])); +normalize_metadata_value(AppName, Key, Values) when is_list(Values) -> + Value = ["[", lists:join(",", Values), "]"], + parse_term(AppName, Value, io_lib:format("metadata value for ~ts", [Key])). -spec check_and_normalize_template( - string(), - string() | undefined, + binary(), + binary() | undefined, term(), [atom()], [atom()], @@ -184,7 +200,7 @@ check_and_normalize_template( Env, Metadata ) -> - App = erlang:list_to_atom(AppName), + App = binary_to_atom(AppName), Props = case Terms of {application, App, P} when erlang:is_list(P) -> @@ -226,20 +242,19 @@ add_optional_fields(Props, [{K, V0} | Fields]) -> _ -> case V0 =:= V1 of true -> add_optional_fields(Props, Fields); - false -> - erlang:error(app_props_not_compatible, [{K, V0}, {K, V1}]) + false -> erlang:error(app_props_not_compatible, [{K, V0}, {K, V1}]) end end; add_optional_fields(Props, [Field | Fields]) -> add_optional_fields([Field | Props], Fields). --spec verify_app_props(string(), string(), [atom()], [atom()], proplists:proplist()) -> ok. +-spec verify_app_props(binary(), binary(), [atom()], [atom()], proplists:proplist()) -> ok. verify_app_props(AppName, Version, Applications, IncludedApplications, Props0) -> Props1 = verify_applications(AppName, Props0), %% ensure defaults ensure_fields(AppName, Version, Applications, IncludedApplications, Props1). --spec verify_applications(string(), proplists:proplist()) -> ok. +-spec verify_applications(binary(), proplists:proplist()) -> ok. verify_applications(AppName, AppDetail) -> case proplists:get_value(applications, AppDetail) of AppList when is_list(AppList) -> @@ -269,14 +284,14 @@ normalize_application(Applications) -> end, Kernel ++ StdLib ++ Applications. --spec ensure_fields(string(), string(), [atom()], [atom()], proplists:proplist()) -> +-spec ensure_fields(binary(), binary(), [atom()], [atom()], proplists:proplist()) -> proplists:proplist(). 
ensure_fields(AppName, Version, Applications, IncludedApplications, Props) -> %% default means to add the value if not existing %% match means to overwrite if not existing and check otherwise for Defaults = [ {{registered, []}, default}, - {{vsn, Version}, match}, + {{vsn, binary_to_list(Version)}, match}, {{description, "missing description"}, default}, {{applications, Applications}, match}, {{included_applications, IncludedApplications}, match} @@ -310,7 +325,7 @@ ensure_fields(AppName, Version, Applications, IncludedApplications, Props) -> -spec render_app_file(string(), application_resource(), file:filename(), [file:filename()]) -> ok. render_app_file(AppName, Terms, Output, Srcs) -> - App = erlang:list_to_atom(AppName), + App = binary_to_atom(AppName), Modules = generate_modules(Srcs), {application, App, Props0} = Terms, %% remove modules key @@ -318,19 +333,19 @@ render_app_file(AppName, Terms, Output, Srcs) -> %% construct new terms Spec = {application, App, [{modules, Modules} | Props1]}, - ToWrite = io_lib:format("~p.\n", [Spec]), - file:write_file(Output, ToWrite, [raw]). + ToWrite = io_lib:format("~kp.\n", [Spec]), + ok = file:write_file(Output, ToWrite, [raw]). -spec generate_modules([file:filename()]) -> [atom()]. generate_modules(Sources) -> Modules = lists:foldl( fun(Source, Acc) -> case filename:extension(Source) of - ".hrl" -> + <<".hrl">> -> Acc; - Ext when Ext == ".erl" orelse Ext == ".xrl" orelse Ext == ".yrl" -> + Ext when Ext == <<".erl">> orelse Ext == <<".xrl">> orelse Ext == <<".yrl">> -> ModuleName = filename:basename(Source, Ext), - Module = erlang:list_to_atom(ModuleName), + Module = erlang:binary_to_atom(ModuleName), [Module | Acc]; _ -> unknown_extension_error(Source) @@ -362,7 +377,7 @@ file_corrupt_error(File, Contents) -> {abort, Msg} ). --spec value_match_error(string(), {atom(), term()}, {atom(), term()}) -> no_return(). +-spec value_match_error(binary(), {atom(), term()}, {atom(), term()}) -> no_return(). value_match_error(AppName, Wrong = {_, Value1}, Default = {_, Value2}) when is_list(Value1) andalso is_list(Value2) -> @@ -410,12 +425,12 @@ applications_type_error(AppName, Applications) -> {abort, Msg} ). --spec module_filed_error(string(), string()) -> no_return(). -module_filed_error(AppName, ModString) -> +-spec parse_error(string(), string(), string()) -> no_return(). +parse_error(AppName, String, Description) -> Msg = io_lib:format( - "error when building ~s.app for application ~s: could not parse value for module field: `~p`", + "error when building ~s.app for application ~s: could not parse value for ~ts: `~p`", [ - AppName, AppName, ModString + AppName, AppName, Description, String ] ), erlang:error( @@ -534,10 +549,11 @@ lcs([_SH | ST] = S, [_TH | TT] = T, Cache, Acc) -> -spec add_metadata(proplists:proplist(), map()) -> proplists:proplist(). add_metadata(Props, Metadata) -> ok = verify_metadata(Props, Metadata), - Props ++ maps:to_list(Metadata). + Props ++ maps:to_list(maps:iterator(Metadata, ordered)). -spec verify_metadata(proplists:proplist(), map()) -> ok. -verify_metadata([], _) -> ok; +verify_metadata([], _) -> + ok; verify_metadata([{K, V0} | T], Metadata) -> case maps:get(K, Metadata, undefined) of undefined -> diff --git a/prelude/erlang/toolchain/dependency_analyzer.escript b/prelude/erlang/toolchain/dependency_analyzer.escript index d714fa36a30..d087a064282 100644 --- a/prelude/erlang/toolchain/dependency_analyzer.escript +++ b/prelude/erlang/toolchain/dependency_analyzer.escript @@ -21,12 +21,12 @@ %%% or a given output file.
The format is as follows and intended to %%% be consumed by other file:consult/1: %%% ``` -%%% [#{"type" := "include" +%%% [#{<<"type">> := "include" %%% | "include_lib" %%% | "behaviour" %%% | "parse_transform" %%% | "manual_dependency", -%%% "file" := "header_or_source_file.(h|e)rl", +%%% <<"file">> := "header_or_source_file.(h|e)rl", %%% ["app" => "application"][only for "include_lib"] %%% }, %%% ... @@ -96,10 +96,10 @@ usage() -> do(InFile, Outspec) -> {ok, Forms} = epp_dodger:parse_file(InFile), Dependencies = lists:sort(process_forms(Forms, [])), - OutData = unicode:characters_to_binary(io_lib:format("~p.", [Dependencies])), + OutData = unicode:characters_to_binary(json:encode(Dependencies)), case Outspec of {file, File} -> - file:write_file(File, OutData); + ok = file:write_file(File, OutData); stdout -> io:format("~s~n", [OutData]) end. @@ -108,29 +108,33 @@ do(InFile, Outspec) -> process_forms([], Acc) -> Acc; process_forms([?MATCH_INCLUDE(Include) | Rest], Acc) -> - Dependency = #{"file" => filename:basename(Include), "type" => "include"}, + Dependency = #{<<"file">> => list_to_binary(filename:basename(Include)), <<"type">> => <<"include">>}, process_forms(Rest, [Dependency | Acc]); process_forms([?MATCH_INCLUDE_LIB(IncludeLib) | Rest], Acc) -> Dependency = case filename:split(IncludeLib) of [App, "include", Include] -> - #{"app" => App, "file" => Include, "type" => "include_lib"}; + #{ + <<"app">> => list_to_binary(App), + <<"file">> => list_to_binary(Include), + <<"type">> => <<"include_lib">> + }; _ -> error(malformed_header_include_lib) end, process_forms(Rest, [Dependency | Acc]); process_forms([?MATCH_BEHAVIOR(Module) | Rest], Acc) -> - Dependency = #{"file" => module_to_erl(Module), "type" => "behaviour"}, + Dependency = #{<<"file">> => module_to_erl(Module), <<"type">> => <<"behaviour">>}, process_forms(Rest, [Dependency | Acc]); process_forms([?MATCH_BEHAVIOUR(Module) | Rest], Acc) -> - Dependency = #{"file" => module_to_erl(Module), "type" => "behaviour"}, + Dependency = #{<<"file">> => module_to_erl(Module), <<"type">> => <<"behaviour">>}, process_forms(Rest, [Dependency | Acc]); process_forms([?MATCH_PARSETRANSFORM(Module) | Rest], Acc) -> - Dependency = #{"file" => module_to_erl(Module), "type" => "parse_transform"}, + Dependency = #{<<"file">> => module_to_erl(Module), <<"type">> => <<"parse_transform">>}, process_forms(Rest, [Dependency | Acc]); process_forms([?MATCH_MANUAL_DEPENDENCIES(Modules) | Rest], Acc) -> Dependencies = [ - #{"file" => module_to_erl(Module), "type" => "manual_dependency"} + #{<<"file">> => module_to_erl(Module), <<"type">> => <<"manual_dependency">>} || {tree, atom, _, Module} <- Modules ], process_forms(Rest, Dependencies ++ Acc); @@ -139,4 +143,4 @@ process_forms([_ | Rest], Acc) -> -spec module_to_erl(module()) -> file:filename(). module_to_erl(Module) -> - unicode:characters_to_list([atom_to_list(Module), ".erl"]). + unicode:characters_to_binary([atom_to_list(Module), ".erl"]). diff --git a/prelude/erlang/toolchain/dependency_finalizer.escript b/prelude/erlang/toolchain/dependency_finalizer.escript index 0d829d81e07..eb4574192f4 100644 --- a/prelude/erlang/toolchain/dependency_finalizer.escript +++ b/prelude/erlang/toolchain/dependency_finalizer.escript @@ -1,8 +1,16 @@ %%% % @format +%%% Copyright (c) Meta Platforms, Inc. and affiliates. 
+%%% +%%% This source code is licensed under both the MIT license found in the +%%% LICENSE-MIT file in the root directory of this source tree and the Apache +%%% License, Version 2.0 found in the LICENSE-APACHE file in the root directory +%%% of this source tree. -module(dependency_finalizer). -author("loscher@meta.com"). +-type dep_files_data() :: #{file:filename() => #{string() := file:filename()}}. + -spec main([string()]) -> ok | no_return(). main([Source, InFile]) -> do(Source, InFile, stdout); @@ -18,34 +26,43 @@ usage() -> -spec do(file:filename(), file:filename(), {file, file:filename()} | stdout) -> ok. do(Source, InFile, OutSpec) -> - case file:consult(InFile) of - {ok, [DepFiles]} -> + case read_file(InFile) of + {ok, DepFiles} -> Dependencies = build_dep_info(Source, DepFiles), - OutData = unicode:characters_to_binary(to_json_list(Dependencies)), + OutData = unicode:characters_to_binary(json:encode(Dependencies)), case OutSpec of {file, File} -> - file:write_file(File, OutData); + ok = file:write_file(File, OutData); stdout -> io:format("~s~n", [OutData]) end; Err -> - io:format(stderr, "error, could no parse file correctly: ~p~n", [Err]), + io:format(standard_error, "error, could not parse file correctly: ~p~n", [Err]), erlang:halt(1) end. --spec build_dep_info(file:filename(), #{file:filename() => #{string() := file:filename()}}) -> ok. +-spec read_file(file:filename()) -> {ok, dep_files_data()} | {error, term()}. +read_file(File) -> + case file:read_file(File) of + {ok, Data} -> + {ok, json:decode(Data)}; + Err -> + Err + end. + +-spec build_dep_info(file:filename(), dep_files_data()) -> ok. build_dep_info(Source, DepFiles) -> - Key = filename:basename(Source, ".erl") ++ ".beam", + Key = list_to_binary(filename:basename(Source, ".erl") ++ ".beam"), collect_dependencies([Key], DepFiles, sets:new([{version, 2}]), []). collect_dependencies([], _, _, Acc) -> Acc; collect_dependencies([Key | Rest], DepFiles, Visited, Acc) -> case DepFiles of - #{Key := #{"dep_file" := DepFile}} -> - {ok, [Dependencies]} = file:consult(DepFile), + #{Key := #{<<"dep_file">> := DepFile}} -> + {ok, Dependencies} = read_file(DepFile), {NextKeys, NextVisited, NextAcc} = lists:foldl( - fun(#{"file" := File} = Dep, {KeysAcc, VisitedAcc, DepAcc}) -> + fun(#{<<"file">> := File} = Dep, {KeysAcc, VisitedAcc, DepAcc}) -> NextKey = key(File), case sets:is_element(NextKey, VisitedAcc) of true -> {KeysAcc, VisitedAcc, DepAcc}; @@ -73,60 +90,3 @@ key(FileName) -> ".erl" -> filename:basename(FileName, ".erl") ++ ".beam"; _ -> FileName end. - -%%% -%%% JSON encoding: base-line escripts we use in our toolchain need to be dependency less -%%% - --spec to_json_list([#{string() => string()}]) -> string(). -to_json_list(Dependencies) -> - [ - "[", - string:join([json_encode_dependency(Dependency) || Dependency <- Dependencies], ","), - "]" - ]. - --spec json_encode_dependency(#{string() => string()}) -> string(). -json_encode_dependency(Dep) -> - Elements = maps:fold( - fun(Key, Value, Acc) -> - [[json_string_escape(Key), ":", json_string_escape(Value)] | Acc] - end, - [], - Dep - ), - ["{", string:join(Elements, ","), "}"]. - --spec json_string_escape(string()) -> string(). -json_string_escape(Str) -> - [ - "\"", - [json_escape_char(C) || C <- Str], - "\"" - ]. - --spec json_escape_char(non_neg_integer()) -> non_neg_integer() | string().
-json_escape_char($\") -> - [$\\, $\"]; -json_escape_char($\\) -> - [$\\, $\\]; -json_escape_char($\/) -> - [$\\, $\/]; -json_escape_char($\b) -> - [$\\, $\b]; -json_escape_char($\f) -> - [$\\, $\f]; -json_escape_char($\n) -> - [$\\, $\n]; -json_escape_char($\r) -> - [$\\, $\r]; -json_escape_char($\t) -> - [$\\, $\t]; -json_escape_char(C) when C >= 16#20 andalso C =< 16#10FFFF -> - %% unescaped, 16#5C (\) and 16#22 (") are handled above - C; -json_escape_char(C) when C < 16#10000 -> - io_lib:format("\\u~s", [string:pad(integer_to_list(C, 16), 4, leading, " ")]); -json_escape_char(_) -> - %% TODO: support extended unicode characters - error(utf8_extended_character_not_supported). diff --git a/prelude/erlang/toolchain/edoc_cli.escript b/prelude/erlang/toolchain/edoc_cli.escript index d61997d9db9..80a1a5c423d 100644 --- a/prelude/erlang/toolchain/edoc_cli.escript +++ b/prelude/erlang/toolchain/edoc_cli.escript @@ -72,7 +72,7 @@ remove_loggers() -> [logger:remove_handler(H) || H <- logger:get_handler_ids()]. generate_empty_chunk(File, OutputDir) -> - file:write_file( + ok = file:write_file( chunk_path(File, OutputDir), erlang:term_to_binary(failed_to_build_doc_chunk) ). @@ -85,7 +85,7 @@ verify_files_exist(#{files := Files, out_dir := OutputDir}) -> true -> true; false -> - io:format(standard_error, "error: coudn't generate ~s~n", [ChunkPath]), + io:format(standard_error, "error: couldn't generate ~s~n", [ChunkPath]), false end end, diff --git a/prelude/erlang/toolchain/epp_dodger.erl b/prelude/erlang/toolchain/epp_dodger.erl new file mode 100644 index 00000000000..52193e10bc8 --- /dev/null +++ b/prelude/erlang/toolchain/epp_dodger.erl @@ -0,0 +1,944 @@ +%% A temporary port of the official OTP epp_dodger from OTP 27, +%% so that EDoc can also be computed for OTP 26 in the presence of the +%% maybe operator. See https://github.com/erlang/otp/issues/7266 +%% ===================================================================== +%% Licensed under the Apache License, Version 2.0 (the "License"); you may +%% not use this file except in compliance with the License. You may obtain +%% a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%% +%% Alternatively, you may use this file under the terms of the GNU Lesser +%% General Public License (the "LGPL") as published by the Free Software +%% Foundation; either version 2.1, or (at your option) any later version. +%% If you wish to allow use of your version of this file only under the +%% terms of the LGPL, you should delete the provisions above and replace +%% them with the notice and other provisions required by the LGPL; see +%% <http://www.gnu.org/licenses/>. If you do not delete the provisions +%% above, a recipient may use your version of this file under the terms of +%% either the Apache License or the LGPL. +%% +%% @copyright 2001-2006 Richard Carlsson +%% @author Richard Carlsson <carlsson.richard@gmail.com> +%% @end +%% ===================================================================== + +%% @doc `epp_dodger' - bypasses the Erlang preprocessor. +%% +%% <p>This module tokenises and parses most Erlang source code without +%% expanding preprocessor directives and macro applications, as long as +%% these are syntactically "well-behaved". Because the normal parse +%% trees of the `erl_parse' module cannot represent these things +%% (normally, they are expanded by the Erlang preprocessor {@link +%% //stdlib/epp} before the parser sees them), an extended syntax tree +%% is created, using the {@link erl_syntax} module.</p>
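%% Illustrative sketch (not part of the vendored source): driving the
%% module against a hypothetical "src/foo.erl":
%%     {ok, Forms} = epp_dodger:parse_file("src/foo.erl"),
%%     %% macro uses survive as `macro' syntax nodes instead of being
%%     %% expanded away
%%     [erl_syntax:type(F) || F <- Forms].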
+ + +%% NOTES: +%% +%% * It's OK if the result does not parse - then at least nothing +%% strange happens, and the user can resort to full preprocessing. +%% However, we must avoid generating a token stream that is accepted by +%% the parser, but has a different meaning than the intended. A typical +%% example is when someone uses token-level string concatenation with +%% macros, as in `"foo" ?bar' (where `?bar' expands to a string). If we +%% replace the tokens `? bar' with `( ... )', to preserve precedence, +%% the result will be parsed as an application `"foo" ( ... )' and cause +%% trouble later on. We must detect such cases and report an error. +%% +%% * It is pointless to add a mechanism for tracking which macros are +%% known to take arguments, and which are known to take no arguments, +%% since a lot of the time we will not have seen the macro definition +%% anyway (it's usually in a header file). Hence, we try to use +%% heuristics instead. In most cases, the token sequence `? foo (' +%% indicates that it is a call of a macro that is supposed to take +%% arguments, but e.g., in the context `: ? foo (', the argument list +%% typically belongs to a remote function call, as in `m:?f(...)' and +%% should be parsed as `m:(?f)(...)' unless it is actually a try-clause +%% pattern such as `throw:?f(...) ->'. +%% +%% * We do our best to make macros without arguments pass the parsing +%% stage transparently. Atoms are accepted in most contexts, but +%% variables are not, so we use only atoms to encode these macros. +%% Sadly, the parsing sometimes discards even the location info from +%% atom tokens, so we can only use the actual characters for this. +%% +%% * We recognize `?m(...' at the start of a form and prevent this from +%% being interpreted as a macro with arguments, since it is probably a +%% function definition. Likewise with attributes `-?m(...'. + +-module(epp_dodger). + +-export([parse_file/1, quick_parse_file/1, parse_file/2, + quick_parse_file/2, parse/1, quick_parse/1, parse/2, + quick_parse/2, parse/3, quick_parse/3, parse_form/2, + parse_form/3, quick_parse_form/2, quick_parse_form/3, + format_error/1, tokens_to_string/1]). + + +%% The following should be: 1) pseudo-uniquely identifiable, and 2) +%% cause nice looking error messages when the parser has to give up. + +-define(macro_call, '? ('). +-define(atom_prefix, "? "). +-define(var_prefix, "?,"). +-define(pp_form, '?preprocessor declaration?'). + + +%% @type errorinfo() = //stdlib/erl_scan:error_info(). +%% +%% This is a so-called Erlang I/O ErrorInfo structure; see the {@link +%% //stdlib/io} module for details. + +-type errorinfo() :: erl_scan:error_info(). + +-type option() :: atom() | {atom(), term()}. + +%% ===================================================================== +%% @spec parse_file(File) -> {ok, Forms} | {error, errorinfo()} +%% File = file:filename() +%% Forms = [erl_syntax:syntaxTree()] +%% +%% @equiv parse_file(File, []) + +-spec parse_file(file:filename()) -> + {'ok', erl_syntax:forms()} | {'error', errorinfo()}. + +parse_file(File) -> + parse_file(File, []). + +%% @spec parse_file(File, Options) -> {ok, Forms} | {error, errorinfo()} +%% File = file:filename() +%% Options = [term()] +%% Forms = [erl_syntax:syntaxTree()] +%% +%% @doc Reads and parses a file. If successful, `{ok, Forms}' +%% is returned, where `Forms' is a list of abstract syntax +%% trees representing the "program forms" of the file (cf. +%% `erl_syntax:is_form/1'). 
Otherwise, `{error, errorinfo()}' is +%% returned, typically if the file could not be opened. Note that +%% parse errors show up as error markers in the returned list of +%% forms; they do not cause this function to fail or return +%% `{error, errorinfo()}'. +%% +%% Options: +%% <dl>
+%% <dt>{@type {no_fail, boolean()@}}</dt>
+%% <dd>If `true', this makes `epp_dodger' replace any program forms +%% that could not be parsed with nodes of type `text' (see {@link +%% erl_syntax:text/1}), representing the raw token sequence of the +%% form, instead of reporting a parse error. The default value is +%% `false'.</dd>
+%% <dt>{@type {clever, boolean()@}}</dt>
+%% <dd>If set to `true', this makes `epp_dodger' try to repair the +%% source code as it seems fit, in certain cases where parsing would +%% otherwise fail. Currently, it inserts `++'-operators between string +%% literals and macros where it looks like concatenation was intended. +%% The default value is `false'.</dd>
+%% </dl>
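%% Illustrative sketch (not part of the vendored source): with
%% `no_fail', a file containing one unparsable form still yields the
%% full list of forms, the bad one as a `text' node ("src/foo.erl" is
%% hypothetical):
%%     {ok, Forms} = epp_dodger:parse_file("src/foo.erl", [{no_fail, true}]),
%%     lists:any(fun(F) -> erl_syntax:type(F) =:= text end, Forms).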
+%% +%% @see parse/2 +%% @see quick_parse_file/1 +%% @see erl_syntax:is_form/1 + +-spec parse_file(file:filename(), [option()]) -> + {'ok', erl_syntax:forms()} | {'error', errorinfo()}. + +parse_file(File, Options) -> + parse_file(File, fun parse/3, Options). + +%% @spec quick_parse_file(File) -> {ok, Forms} | {error, errorinfo()} +%% File = file:filename() +%% Forms = [erl_syntax:syntaxTree()] +%% +%% @equiv quick_parse_file(File, []) + +-spec quick_parse_file(file:filename()) -> + {'ok', erl_syntax:forms()} | {'error', errorinfo()}. + +quick_parse_file(File) -> + quick_parse_file(File, []). + +%% @spec quick_parse_file(File, Options) -> +%% {ok, Forms} | {error, errorinfo()} +%% File = file:filename() +%% Options = [term()] +%% Forms = [erl_syntax:syntaxTree()] +%% +%% @doc Similar to {@link parse_file/2}, but does a more quick-and-dirty +%% processing of the code. Macro definitions and other preprocessor +%% directives are discarded, and all macro calls are replaced with +%% atoms. This is useful when only the main structure of the code is of +%% interest, and not the details. Furthermore, the quick-parse method +%% can usually handle more strange cases than the normal, more exact +%% parsing. +%% +%% Options: see {@link parse_file/2}. Note however that for +%% `quick_parse_file/2', the option `no_fail' is `true' by default. +%% +%% @see quick_parse/2 +%% @see parse_file/2 + +-spec quick_parse_file(file:filename(), [option()]) -> + {'ok', erl_syntax:forms()} | {'error', errorinfo()}. + +quick_parse_file(File, Options) -> + parse_file(File, fun quick_parse/3, Options ++ [no_fail]). + +parse_file(File, Parser, Options) -> + case do_parse_file(utf8, File, Parser, Options) of + {ok, Forms}=Ret -> + case find_invalid_unicode(Forms) of + none -> + Ret; + invalid_unicode -> + case epp:read_encoding(File) of + utf8 -> + Ret; + _ -> + do_parse_file(latin1, File, Parser, Options) + end + end; + Else -> + Else + end. + +do_parse_file(DefEncoding, File, Parser, Options) -> + case file:open(File, [read]) of + {ok, Dev} -> + _ = epp:set_encoding(Dev, DefEncoding), + try Parser(Dev, 1, Options) + after ok = file:close(Dev) + end; + {error, Error} -> + {error, {0, file, Error}} % defer to file:format_error/1 + end. + +find_invalid_unicode([H|T]) -> + case H of + {error, {_Location, file_io_server, invalid_unicode}} -> + invalid_unicode; + _Other -> + find_invalid_unicode(T) + end; +find_invalid_unicode([]) -> none. + +%% ===================================================================== +%% @spec parse(IODevice) -> {ok, Forms} | {error, errorinfo()} +%% @equiv parse(IODevice, 1) + +-spec parse(file:io_device()) -> {'ok', erl_syntax:forms()}. + +parse(Dev) -> + parse(Dev, 1). + +%% @spec parse(IODevice, StartLocation) -> {ok, Forms} | {error, errorinfo()} +%% IODevice = pid() +%% StartLocation = //stdlib/erl_anno:location() +%% Forms = [erl_syntax:syntaxTree()] +%% +%% @equiv parse(IODevice, StartLocation, []) +%% @see parse/1 + +-spec parse(file:io_device(), erl_anno:location()) -> {'ok', erl_syntax:forms()}. + +parse(Dev, L) -> + parse(Dev, L, []). + +%% @spec parse(IODevice, StartLocation, Options) -> +%% {ok, Forms} | {error, errorinfo()} +%% IODevice = pid() +%% StartLocation = //stdlib/erl_anno:location() +%% Options = [term()] +%% Forms = [erl_syntax:syntaxTree()] +%% +%% @doc Reads and parses program text from an I/O stream. Characters are +%% read from `IODevice' until end-of-file; apart from this, the +%% behaviour is the same as for {@link parse_file/2}. 
`StartLocation' is the +%% initial location. +%% +%% @see parse/2 +%% @see parse_file/2 +%% @see parse_form/2 +%% @see quick_parse/3 + +-spec parse(file:io_device(), erl_anno:location(), [option()]) -> + {'ok', erl_syntax:forms()}. + +parse(Dev, L0, Options) -> + parse(Dev, L0, fun parse_form/3, Options). + +%% @spec quick_parse(IODevice) -> {ok, Forms} | {error, errorinfo()} +%% @equiv quick_parse(IODevice, 1) + +-spec quick_parse(file:io_device()) -> + {'ok', erl_syntax:forms()}. + +quick_parse(Dev) -> + quick_parse(Dev, 1). + +%% @spec quick_parse(IODevice, StartLocation) -> +%% {ok, Forms} | {error, errorinfo()} +%% IODevice = pid() +%% StartLocation = //stdlib/erl_anno:location() +%% Forms = [erl_syntax:syntaxTree()] +%% +%% @equiv quick_parse(IODevice, StartLocation, []) +%% @see quick_parse/1 + +-spec quick_parse(file:io_device(), erl_anno:location()) -> + {'ok', erl_syntax:forms()}. + +quick_parse(Dev, L) -> + quick_parse(Dev, L, []). + +%% @spec (IODevice, StartLocation, Options) -> +%% {ok, Forms} | {error, errorinfo()} +%% IODevice = pid() +%% StartLocation = //stdlib/erl_anno:location() +%% Options = [term()] +%% Forms = [erl_syntax:syntaxTree()] +%% +%% @doc Similar to {@link parse/3}, but does a more quick-and-dirty +%% processing of the code. See {@link quick_parse_file/2} for details. +%% +%% @see quick_parse/2 +%% @see quick_parse_file/2 +%% @see quick_parse_form/2 +%% @see parse/3 + +-spec quick_parse(file:io_device(), erl_anno:location(), [option()]) -> + {'ok', erl_syntax:forms()}. + +quick_parse(Dev, L0, Options) -> + parse(Dev, L0, fun quick_parse_form/3, Options). + +parse(Dev, L0, Parser, Options) -> + parse(Dev, L0, [], Parser, Options). + +parse(Dev, L0, Fs, Parser, Options) -> + case Parser(Dev, L0, Options) of + {ok, none, L1} -> + parse(Dev, L1, Fs, Parser, Options); + {ok, F, L1} -> + parse(Dev, L1, [F | Fs], Parser, Options); + {error, IoErr, L1} -> + parse(Dev, L1, [{error, IoErr} | Fs], Parser, Options); + {eof, _L1} -> + {ok, lists:reverse(Fs)} + end. + + +%% ===================================================================== +%% @spec parse_form(IODevice, StartLocation) -> {ok, Form, Location} +%% | {eof, Location} +%% | {error, errorinfo(), Location} +%% IODevice = pid() +%% StartLocation = //stdlib/erl_anno:location() +%% Form = erl_syntax:syntaxTree() +%% Location = //stdlib/erl_anno:location() +%% +%% @equiv parse_form(IODevice, StartLocation, []) +%% +%% @see quick_parse_form/2 + +-spec parse_form(file:io_device(), erl_anno:location()) -> + {'ok', erl_syntax:forms(), erl_anno:location()} + | {'eof', erl_anno:location()} | {'error', errorinfo(), erl_anno:location()}. + +parse_form(Dev, L0) -> + parse_form(Dev, L0, []). + +%% @spec parse_form(IODevice, StartLocation, Options) -> +%% {ok, Form, Location} +%% | {eof, Location} +%% | {error, errorinfo(), Location} +%% +%% IODevice = pid() +%% StartLocation = //stdlib/erl_anno:location() +%% Options = [term()] +%% Form = erl_syntax:syntaxTree() +%% Location = //stdlib/erl_anno:location() +%% +%% @doc Reads and parses a single program form from an I/O stream. +%% Characters are read from `IODevice' until an end-of-form +%% marker is found (a period character followed by whitespace), or until +%% end-of-file; apart from this, the behaviour is similar to that of +%% `parse/3', except that the return values also contain the +%% final location given that `StartLocation' is the initial +%% location, and that `{eof, Location}' may be returned. 
+%% +%% @see parse/3 +%% @see parse_form/2 +%% @see quick_parse_form/3 + +-spec parse_form(file:io_device(), erl_anno:location(), [option()]) -> + {'ok', erl_syntax:forms(), erl_anno:location()} + | {'eof', erl_anno:location()} | {'error', errorinfo(), erl_anno:location()}. + +parse_form(Dev, L0, Options) -> + parse_form(Dev, L0, fun normal_parser/2, Options). + +%% @spec quick_parse_form(IODevice, StartLocation) -> +%% {ok, Form, Location} +%% | {eof, Location} +%% | {error, errorinfo(), Location} +%% IODevice = pid() +%% StartLocation = //stdlib/erl_anno:location() +%% Form = erl_syntax:syntaxTree() | none +%% Location = //stdlib/erl_anno:location() +%% +%% @equiv quick_parse_form(IODevice, StartLocation, []) +%% +%% @see parse_form/2 + +-spec quick_parse_form(file:io_device(), erl_anno:location()) -> + {'ok', erl_syntax:forms(), erl_anno:location()} + | {'eof', erl_anno:location()} | {'error', errorinfo(), erl_anno:location()}. + +quick_parse_form(Dev, L0) -> + quick_parse_form(Dev, L0, []). + +%% @spec quick_parse_form(IODevice, StartLocation, Options) -> +%% {ok, Form, Location} +%% | {eof, Location} +%% | {error, errorinfo(), Location} +%% +%% IODevice = pid() +%% StartLocation = //stdlib/erl_anno:location() +%% Options = [term()] +%% Form = erl_syntax:syntaxTree() +%% Location = //stdlib/erl_anno:location() +%% +%% @doc Similar to {@link parse_form/3}, but does a more quick-and-dirty +%% processing of the code. See {@link quick_parse_file/2} for details. +%% +%% @see parse/3 +%% @see quick_parse_form/2 +%% @see parse_form/3 + +-spec quick_parse_form(file:io_device(), erl_anno:location(), [option()]) -> + {'ok', erl_syntax:forms(), erl_anno:location()} + | {'eof', erl_anno:location()} | {'error', errorinfo(), erl_anno:location()}. + +quick_parse_form(Dev, L0, Options) -> + parse_form(Dev, L0, fun quick_parser/2, Options). + +-record(opt, {clever = false :: boolean()}). + +parse_form(Dev, L0, Parser, Options) -> + NoFail = proplists:get_bool(no_fail, Options), + Opt = #opt{clever = proplists:get_bool(clever, Options)}, + + %% This as the *potential* to read options for enabling/disabling + %% features for the parsing of the file. + {ok, {_Ftrs, ResWordFun}} = + erl_features:keyword_fun(Options, fun reserved_word/1), + + case io:scan_erl_form(Dev, "", L0, [{reserved_word_fun,ResWordFun}]) of + {ok, Ts, L1} -> + case catch {ok, Parser(Ts, Opt)} of + {'EXIT', Term} -> + {error, io_error(L1, {unknown, Term}), L1}; + {error, Term} -> + IoErr = io_error(L1, Term), + {error, IoErr, L1}; + {parse_error, _IoErr} when NoFail -> + {ok, erl_syntax:set_pos( + erl_syntax:text(tokens_to_string(Ts)), + erl_anno:new(start_pos(Ts, L1))), + L1}; + {parse_error, IoErr} -> + {error, IoErr, L1}; + {ok, F} -> + {ok, F, L1} + end; + {error, _IoErr, _L1} = Err -> Err; + {error, _Reason} -> {eof, L0}; % This is probably encoding problem + {eof, _L1} = Eof -> Eof + end. + +io_error(L, Desc) -> + {L, ?MODULE, Desc}. + +start_pos([T | _Ts], _L) -> + erl_anno:location(element(2, T)); +start_pos([], L) -> + L. + +%% Exception-throwing wrapper for the standard Erlang parser stage + +parse_tokens(Ts) -> + parse_tokens(Ts, fun fix_form/1). + +parse_tokens(Ts, Fix) -> + case erl_parse:parse_form(Ts) of + {ok, Form} -> + Form; + {error, IoErr} -> + case Fix(Ts) of + {form, Form} -> + Form; + {retry, Ts1, Fix1} -> + parse_tokens(Ts1, Fix1); + error -> + throw({parse_error, IoErr}) + end + end. 
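%% Illustrative sketch (not part of the vendored source): parse_form/2
%% consumes one form at a time from an open device, so callers can
%% stop early ("src/foo.erl" is hypothetical):
%%     {ok, Dev} = file:open("src/foo.erl", [read]),
%%     {ok, _First, Loc1} = epp_dodger:parse_form(Dev, 1),
%%     {ok, _Second, _Loc2} = epp_dodger:parse_form(Dev, Loc1),
%%     ok = file:close(Dev).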
+ +%% --------------------------------------------------------------------- +%% Quick scanning/parsing - deletes macro definitions and other +%% preprocessor directives, and replaces all macro calls with atoms. + +quick_parser(Ts, _Opt) -> + filter_form(parse_tokens(quickscan_form(Ts))). + +quickscan_form([{'-', _Anno}, {atom, AnnoA, define} | _Ts]) -> + kill_form(AnnoA); +quickscan_form([{'-', _Anno}, {atom, AnnoA, undef} | _Ts]) -> + kill_form(AnnoA); +quickscan_form([{'-', _Anno}, {atom, AnnoA, include} | _Ts]) -> + kill_form(AnnoA); +quickscan_form([{'-', _Anno}, {atom, AnnoA, include_lib} | _Ts]) -> + kill_form(AnnoA); +quickscan_form([{'-', _Anno}, {atom, AnnoA, ifdef} | _Ts]) -> + kill_form(AnnoA); +quickscan_form([{'-', _Anno}, {atom, AnnoA, ifndef} | _Ts]) -> + kill_form(AnnoA); +quickscan_form([{'-', _Anno}, {'if', AnnoA} | _Ts]) -> + kill_form(AnnoA); +quickscan_form([{'-', _Anno}, {atom, AnnoA, elif} | _Ts]) -> + kill_form(AnnoA); +quickscan_form([{'-', _Anno}, {atom, AnnoA, 'else'} | _Ts]) -> + kill_form(AnnoA); +quickscan_form([{'-', _Anno}, {'else', AnnoA} | _Ts]) -> + kill_form(AnnoA); +quickscan_form([{'-', _Anno}, {atom, AnnoA, endif} | _Ts]) -> + kill_form(AnnoA); +quickscan_form([{'-', _Anno}, {atom, AnnoA, feature} | _Ts]) -> + kill_form(AnnoA); +quickscan_form([{'-', Anno}, {'?', _}, {Type, _, _}=N | [{'(', _} | _]=Ts]) + when Type =:= atom; Type =:= var -> + %% minus, macro and open parenthesis at start of form - assume that + %% the macro takes no arguments; e.g. `-?foo(...).' + quickscan_macros_1(N, Ts, [{'-', Anno}]); +quickscan_form([{'?', _Anno}, {Type, _, _}=N | [{'(', _} | _]=Ts]) + when Type =:= atom; Type =:= var -> + %% macro and open parenthesis at start of form - assume that the + %% macro takes no arguments (see scan_macros for details) + quickscan_macros_1(N, Ts, []); +quickscan_form(Ts) -> + quickscan_macros(Ts). + +kill_form(A) -> + [{atom, A, ?pp_form}, {'(', A}, {')', A}, {'->', A}, {atom, A, kill}, + {dot, A}]. + +quickscan_macros(Ts) -> + quickscan_macros(Ts, []). + +quickscan_macros([{'?',_}, {Type, _, A} | Ts], [{string, AnnoS, S} | As]) + when Type =:= atom; Type =:= var -> + %% macro after a string literal: change to a single string + {_, Ts1} = skip_macro_args(Ts), + S1 = S ++ quick_macro_string(A), + quickscan_macros(Ts1, [{string, AnnoS, S1} | As]); +quickscan_macros([{'?',_}, {Type, _, _}=N | [{'(',_}|_]=Ts], + [{':',_}|_]=As) + when Type =:= atom; Type =:= var -> + %% macro and open parenthesis after colon - check the token + %% following the arguments (see scan_macros for details) + Ts1 = case skip_macro_args(Ts) of + {_, [{'->',_} | _] = Ts2} -> Ts2; + {_, [{'when',_} | _] = Ts2} -> Ts2; + {_, [{':',_} | _] = Ts2} -> Ts2; + _ -> Ts %% assume macro without arguments + end, + quickscan_macros_1(N, Ts1, As); +quickscan_macros([{'?',_}, {Type, _, _}=N | Ts], As) + when Type =:= atom; Type =:= var -> + %% macro with or without arguments + {_, Ts1} = skip_macro_args(Ts), + quickscan_macros_1(N, Ts1, As); +quickscan_macros([T | Ts], As) -> + quickscan_macros(Ts, [T | As]); +quickscan_macros([], As) -> + lists:reverse(As). 
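%% Illustrative note (not part of the vendored source): the quick scan
%% collapses a macro use into a plain atom, so a form like
%%     foo() -> ?BAR.
%% is re-tokenised as if it read
%%     foo() -> '?BAR'.
%% which parses even though no definition of BAR was ever seen.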
+ +%% (after a macro has been found and the arglist skipped, if any) +quickscan_macros_1({_Type, _, A}, [{string, AnnoS, S} | Ts], As) -> + %% string literal following macro: change to single string + S1 = quick_macro_string(A) ++ S, + quickscan_macros(Ts, [{string, AnnoS, S1} | As]); +quickscan_macros_1({_Type, AnnoA, A}, Ts, As) -> + %% normal case - just replace the macro with an atom + quickscan_macros(Ts, [{atom, AnnoA, quick_macro_atom(A)} | As]). + +quick_macro_atom(A) -> + list_to_atom("?" ++ atom_to_list(A)). + +quick_macro_string(A) -> + "(?" ++ atom_to_list(A) ++ ")". + +%% Skipping to the end of a macro call, tracking open/close constructs. +%% @spec (Tokens) -> {Skipped, Rest} + +skip_macro_args([{'(',_}=T | Ts]) -> + skip_macro_args(Ts, [')'], [T]); +skip_macro_args(Ts) -> + {[], Ts}. + +skip_macro_args([{'(',_}=T | Ts], Es, As) -> + skip_macro_args(Ts, [')' | Es], [T | As]); +skip_macro_args([{'{',_}=T | Ts], Es, As) -> + skip_macro_args(Ts, ['}' | Es], [T | As]); +skip_macro_args([{'[',_}=T | Ts], Es, As) -> + skip_macro_args(Ts, [']' | Es], [T | As]); +skip_macro_args([{'<<',_}=T | Ts], Es, As) -> + skip_macro_args(Ts, ['>>' | Es], [T | As]); +skip_macro_args([{'begin',_}=T | Ts], Es, As) -> + skip_macro_args(Ts, ['end' | Es], [T | As]); +skip_macro_args([{'if',_}=T | Ts], Es, As) -> + skip_macro_args(Ts, ['end' | Es], [T | As]); +skip_macro_args([{'case',_}=T | Ts], Es, As) -> + skip_macro_args(Ts, ['end' | Es], [T | As]); +skip_macro_args([{'receive',_}=T | Ts], Es, As) -> + skip_macro_args(Ts, ['end' | Es], [T | As]); +skip_macro_args([{'try',_}=T | Ts], Es, As) -> + skip_macro_args(Ts, ['end' | Es], [T | As]); +skip_macro_args([{E,_}=T | Ts], [E], As) -> %final close + {lists:reverse([T | As]), Ts}; +skip_macro_args([{E,_}=T | Ts], [E | Es], As) -> %matching close + skip_macro_args(Ts, Es, [T | As]); +skip_macro_args([T | Ts], Es, As) -> + skip_macro_args(Ts, Es, [T | As]); +skip_macro_args([], _Es, _As) -> + throw({error, macro_args}). + +filter_form({function, _, ?pp_form, _, + [{clause, _, [], [], [{atom, _, kill}]}]}) -> + none; +filter_form(T) -> + T. + + +%% --------------------------------------------------------------------- +%% Normal parsing - try to preserve all information + +normal_parser(Ts0, Opt) -> + case scan_form(Ts0, Opt) of + Ts when is_list(Ts) -> + rewrite_form(parse_tokens(Ts)); + Node -> + Node + end. 
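%% Illustrative note (not part of the vendored source): in normal mode
%% a macro application is preserved rather than collapsed: a call such
%% as
%%     f() -> ?bar(1).
%% is re-encoded at the token level as the tuple `{'? (', bar, 1}'
%% (wrapped in parentheses to keep precedence) and rewritten after
%% parsing into an `erl_syntax' macro node; see rewrite/1 further down.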
+ +scan_form([{'-', _Anno}, {atom, AnnoA, define} | Ts], Opt) -> + [{atom, AnnoA, ?pp_form}, {'(', AnnoA}, {')', AnnoA}, {'->', AnnoA}, + {atom, AnnoA, define} | scan_macros(Ts, Opt)]; +scan_form([{'-', _Anno}, {atom, AnnoA, undef} | Ts], Opt) -> + [{atom, AnnoA, ?pp_form}, {'(', AnnoA}, {')', AnnoA}, {'->', AnnoA}, + {atom, AnnoA, undef} | scan_macros(Ts, Opt)]; +scan_form([{'-', _Anno}, {atom, AnnoA, include} | Ts], Opt) -> + [{atom, AnnoA, ?pp_form}, {'(', AnnoA}, {')', AnnoA}, {'->', AnnoA}, + {atom, AnnoA, include} | scan_macros(Ts, Opt)]; +scan_form([{'-', _Anno}, {atom, AnnoA, include_lib} | Ts], Opt) -> + [{atom, AnnoA, ?pp_form}, {'(', AnnoA}, {')', AnnoA}, {'->', AnnoA}, + {atom, AnnoA, include_lib} | scan_macros(Ts, Opt)]; +scan_form([{'-', _Anno}, {atom, AnnoA, ifdef} | Ts], Opt) -> + [{atom, AnnoA, ?pp_form}, {'(', AnnoA}, {')', AnnoA}, {'->', AnnoA}, + {atom, AnnoA, ifdef} | scan_macros(Ts, Opt)]; +scan_form([{'-', _Anno}, {atom, AnnoA, ifndef} | Ts], Opt) -> + [{atom, AnnoA, ?pp_form}, {'(', AnnoA}, {')', AnnoA}, {'->', AnnoA}, + {atom, AnnoA, ifndef} | scan_macros(Ts, Opt)]; +scan_form([{'-', _Anno}, {'if', AnnoA} | Ts], Opt) -> + [{atom, AnnoA, ?pp_form}, {'(', AnnoA}, {')', AnnoA}, {'->', AnnoA}, + {atom, AnnoA, 'if'} | scan_macros(Ts, Opt)]; +scan_form([{'-', _Anno}, {atom, AnnoA, elif} | Ts], Opt) -> + [{atom, AnnoA, ?pp_form}, {'(', AnnoA}, {')', AnnoA}, {'->', AnnoA}, + {atom, AnnoA, 'elif'} | scan_macros(Ts, Opt)]; +scan_form([{'-', _Anno}, {atom, AnnoA, 'else'} | Ts], Opt) -> + [{atom, AnnoA, ?pp_form}, {'(', AnnoA}, {')', AnnoA}, {'->', AnnoA}, + {atom, AnnoA, 'else'} | scan_macros(Ts, Opt)]; +scan_form([{'-', _Anno}, {'else', AnnoA} | Ts], Opt) -> + [{atom, AnnoA, ?pp_form}, {'(', AnnoA}, {')', AnnoA}, {'->', AnnoA}, + {atom, AnnoA, 'else'} | scan_macros(Ts, Opt)]; +scan_form([{'-', _Anno}, {atom, AnnoA, endif} | Ts], Opt) -> + [{atom, AnnoA, ?pp_form}, {'(', AnnoA}, {')', AnnoA}, {'->', AnnoA}, + {atom, AnnoA, endif} | scan_macros(Ts, Opt)]; +scan_form([{'-', _Anno}, {atom, AnnoA, error} | Ts], _Opt) -> + Desc = build_info_string("-error", Ts), + ErrorInfo = {erl_anno:location(AnnoA), ?MODULE, {error, Desc}}, + erl_syntax:error_marker(ErrorInfo); +scan_form([{'-', _Anno}, {atom, AnnoA, warning} | Ts], _Opt) -> + Desc = build_info_string("-warning", Ts), + ErrorInfo = {erl_anno:location(AnnoA), ?MODULE, {warning, Desc}}, + erl_syntax:error_marker(ErrorInfo); +scan_form([{'-', A}, {'?', A1}, {Type, _, _}=N | [{'(', _} | _]=Ts], Opt) + when Type =:= atom; Type =:= var -> + %% minus, macro and open parenthesis at start of form - assume that + %% the macro takes no arguments; e.g. `-?foo(...).' + macro(A1, N, Ts, [{'-', A}], Opt); +scan_form([{'?', A}, {Type, _, _}=N | [{'(', _} | _]=Ts], Opt) + when Type =:= atom; Type =:= var -> + %% macro and open parenthesis at start of form - assume that the + %% macro takes no arguments; probably a function declaration on the + %% form `?m(...) -> ...', which will not parse if it is rewritten as + %% `(?m(...)) -> ...', so it must be handled as `(?m)(...) -> ...' + macro(A, N, Ts, [], Opt); +scan_form(Ts, Opt) -> + scan_macros(Ts, Opt). + +build_info_string(Prefix, Ts0) -> + Ts = lists:droplast(Ts0), + String = lists:droplast(tokens_to_string(Ts)), + Prefix ++ " " ++ String ++ ".". + +scan_macros(Ts, Opt) -> + scan_macros(Ts, [], Opt). 
+ +scan_macros([{'?', _}=M, {Type, _, _}=N | Ts], [{string, AnnoS, _}=S | As], + #opt{clever = true}=Opt) + when Type =:= atom; Type =:= var -> + %% macro after a string literal: be clever and insert ++ + scan_macros([M, N | Ts], [{'++', AnnoS}, S | As], Opt); +scan_macros([{'?', Anno}, {Type, _, _}=N | [{'(',_}|_]=Ts], + [{':',_}|_]=As, Opt) + when Type =:= atom; Type =:= var -> + %% macro and open parentheses after colon - probably a call + %% `m:?F(...)' so the argument list might belong to the call, not + %% the macro - but it could also be a try-clause pattern + %% `...:?T(...) ->' - we need to check the token following the + %% arguments to decide + {Args, Rest} = skip_macro_args(Ts), + case Rest of + [{'->',_} | _] -> + macro_call(Args, Anno, N, Rest, As, Opt); + [{'when',_} | _] -> + macro_call(Args, Anno, N, Rest, As, Opt); + [{':',_} | _] -> + macro_call(Args, Anno, N, Rest, As, Opt); + _ -> + macro(Anno, N, Ts, As, Opt) + end; +scan_macros([{'?', Anno}, {Type, _, _}=N | [{'(',_}|_]=Ts], As, Opt) + when Type =:= atom; Type =:= var -> + %% macro with arguments + {Args, Rest} = skip_macro_args(Ts), + macro_call(Args, Anno, N, Rest, As, Opt); +scan_macros([{'?', Anno }, {Type, _, _}=N | Ts], As, Opt) + when Type =:= atom; Type =:= var -> + %% macro without arguments + macro(Anno, N, Ts, As, Opt); +scan_macros([T | Ts], As, Opt) -> + scan_macros(Ts, [T | As], Opt); +scan_macros([], As, _Opt) -> + lists:reverse(As). + +%% Rewriting to a tuple which will be recognized by the post-parse pass +%% (we insert parentheses to preserve the precedences when parsing). + +macro(Anno, {Type, _, A}, Rest, As, Opt) -> + scan_macros_1([], Rest, [{atom,Anno,macro_atom(Type,A)} | As], Opt). + +macro_call([{'(',_}, {')',_}], Anno, {_, AnnoN, _}=N, Rest, As, Opt) -> + {Open, Close} = parentheses(As), + scan_macros_1([], Rest, + %% {'?macro_call', N } + lists:reverse(Open ++ [{'{', Anno}, + {atom, Anno, ?macro_call}, + {',', Anno}, + N, + {'}', AnnoN}] ++ Close, + As), Opt); +macro_call([{'(',_} | Args], Anno, {_, AnnoN, _}=N, Rest, As, Opt) -> + {Open, Close} = parentheses(As), + %% drop closing parenthesis + {')', _} = lists:last(Args), %% assert + Args1 = lists:droplast(Args), + %% note that we must scan the argument list; it may not be skipped + scan_macros_1(Args1 ++ [{'}', AnnoN} | Close], + Rest, + %% {'?macro_call', N, Arg1, ... } + lists:reverse(Open ++ [{'{', Anno}, + {atom, Anno, ?macro_call}, + {',', Anno}, + N, + {',', AnnoN}], + As), Opt). + +macro_atom(atom, A) -> + list_to_atom(?atom_prefix ++ atom_to_list(A)); +macro_atom(var, A) -> + list_to_atom(?var_prefix ++ atom_to_list(A)). + +%% don't insert parentheses after a string token, to avoid turning +%% `"string" ?macro' into a "function application" `"string"(...)' +%% (see note at top of file) +parentheses([{string, _, _} | _]) -> + {[], []}; +parentheses(_) -> + {[{'(',0}], [{')',0}]}. + +%% (after a macro has been found and the arglist skipped, if any) +scan_macros_1(Args, [{string, AnnoS, _} | _]=Rest, As, + #opt{clever = true}=Opt) -> + %% string literal following macro: be clever and insert ++ + scan_macros(Args ++ [{'++', AnnoS} | Rest], As, Opt); +scan_macros_1(Args, Rest, As, Opt) -> + %% normal case - continue scanning + scan_macros(Args ++ Rest, As, Opt). 
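%% Illustrative note (not part of the vendored source): argument-less
%% macros are smuggled through the parser as specially prefixed atoms:
%% `?bar' becomes the atom '? bar' (?atom_prefix) and `?Bar' becomes
%% '?,Bar' (?var_prefix); rewrite/1 below decodes both back into
%% `erl_syntax' macro nodes once parsing has succeeded.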
+ +rewrite_form({function, Anno, ?pp_form, _, + [{clause, _, [], [], [{call, _, A, As}]}]}) -> + erl_syntax:set_pos(erl_syntax:attribute(A, rewrite_list(As)), Anno); +rewrite_form({function, Anno, ?pp_form, _, [{clause, _, [], [], [A]}]}) -> + erl_syntax:set_pos(erl_syntax:attribute(A), Anno); +rewrite_form(T) -> + rewrite(T). + +rewrite_list([T | Ts]) -> + [rewrite(T) | rewrite_list(Ts)]; +rewrite_list([]) -> + []. + +%% Note: as soon as we start using erl_syntax:subtrees/1 and similar +%% functions, we cannot assume that we know the exact representation of +%% the syntax tree anymore - we must use erl_syntax functions to analyze +%% and decompose the data. + +rewrite(Node) -> + case erl_syntax:type(Node) of + atom -> + case atom_to_list(erl_syntax:atom_value(Node)) of + ?atom_prefix ++ As -> + A1 = list_to_atom(As), + N = erl_syntax:copy_pos(Node, erl_syntax:atom(A1)), + erl_syntax:copy_pos(Node, erl_syntax:macro(N)); + ?var_prefix ++ As -> + A1 = list_to_atom(As), + N = erl_syntax:copy_pos(Node, erl_syntax:variable(A1)), + erl_syntax:copy_pos(Node, erl_syntax:macro(N)); + _ -> + Node + end; + tuple -> + case erl_syntax:tuple_elements(Node) of + [MagicWord, A | As] -> + case erl_syntax:type(MagicWord) of + atom -> + case erl_syntax:atom_value(MagicWord) of + ?macro_call -> + M = erl_syntax:macro(A, rewrite_list(As)), + erl_syntax:copy_pos(Node, M); + _ -> + rewrite_1(Node) + end; + _ -> + rewrite_1(Node) + end; + _ -> + rewrite_1(Node) + end; + _ -> + rewrite_1(Node) + end. + +rewrite_1(Node) -> + case erl_syntax:subtrees(Node) of + [] -> + Node; + Gs -> + Node1 = erl_syntax:make_tree(erl_syntax:type(Node), + [[rewrite(T) || T <- Ts] + || Ts <- Gs]), + erl_syntax:copy_pos(Node, Node1) + end. + +%% attempting a rescue operation on a token sequence for a single form +%% if it could not be parsed after the normal treatment + +fix_form([{atom, _, ?pp_form}, {'(', _}, {')', _}, {'->', _}, + {atom, _, define}, {'(', _} | _]=Ts) -> + case lists:reverse(Ts) of + [{dot, _}, {')', _} | _] -> + {retry, Ts, fun fix_define/1}; + [{dot, Anno} | Ts1] -> + Ts2 = lists:reverse([{dot, Anno}, {')', Anno} | Ts1]), + {retry, Ts2, fun fix_define/1}; + _ -> + error + end; +fix_form(_Ts) -> + error. + +fix_define([{atom, Anno, ?pp_form}, {'(', _}, {')', _}, {'->', _}, + {atom, AnnoA, define}, {'(', _}, N, {',', _} | Ts]) -> + [{dot, _}, {')', _} | Ts1] = lists:reverse(Ts), + S = tokens_to_string(lists:reverse(Ts1)), + A = erl_syntax:set_pos(erl_syntax:atom(define), AnnoA), + Txt = erl_syntax:set_pos(erl_syntax:text(S), AnnoA), + {form, erl_syntax:set_pos(erl_syntax:attribute(A, [N, Txt]), Anno)}; +fix_define(_Ts) -> + error. + +%% @spec tokens_to_string(Tokens::[term()]) -> string() +%% +%% @doc Generates a string corresponding to the given token sequence. +%% The string can be re-tokenized to yield the same token list again. + +-spec tokens_to_string([term()]) -> string(). 
+ +tokens_to_string([{atom,_,A} | Ts]) -> + io_lib:write_atom(A) ++ " " ++ tokens_to_string(Ts); +tokens_to_string([{string, _, S} | Ts]) -> + io_lib:write_string(S) ++ " " ++ tokens_to_string(Ts); +tokens_to_string([{char, _, C} | Ts]) -> + io_lib:write_char(C) ++ " " ++ tokens_to_string(Ts); +tokens_to_string([{float, _, F} | Ts]) -> + float_to_list(F) ++ " " ++ tokens_to_string(Ts); +tokens_to_string([{integer, _, N} | Ts]) -> + integer_to_list(N) ++ " " ++ tokens_to_string(Ts); +tokens_to_string([{var, _, A} | Ts]) -> + atom_to_list(A) ++ " " ++ tokens_to_string(Ts); +tokens_to_string([{dot, _} | Ts]) -> + ".\n" ++ tokens_to_string(Ts); +tokens_to_string([{A, _} | Ts]) -> + atom_to_list(A) ++ " " ++ tokens_to_string(Ts); +tokens_to_string([]) -> + "". + + +%% @spec format_error(Descriptor::term()) -> string() +%% @hidden +%% @doc Callback function for formatting error descriptors. Not for +%% normal use. + +-spec format_error(term()) -> string(). + +format_error(macro_args) -> + errormsg("macro call missing end parenthesis"); +format_error({error, Error}) -> + Error; +format_error({warning, Error}) -> + Error; +format_error({unknown, Reason}) -> + errormsg(io_lib:format("unknown error: ~tP", [Reason, 15])). + +errormsg(String) -> + io_lib:format("~s: ~ts", [?MODULE, String]). + + +%% ===================================================================== + +%% See #7266: The dodger currently does not process feature attributes +%% correctly, so temporarily consider the `else` and `maybe` atoms +%% always as keywords +-spec reserved_word(Atom :: atom()) -> boolean(). +reserved_word('else') -> true; +reserved_word('maybe') -> true; +reserved_word(Atom) -> erl_scan:f_reserved_word(Atom). diff --git a/prelude/apple/xcode_postbuild_script.bzl b/prelude/erlang/toolchain/escript_trampoline.sh old mode 100644 new mode 100755 similarity index 75% rename from prelude/apple/xcode_postbuild_script.bzl rename to prelude/erlang/toolchain/escript_trampoline.sh index 3c425a48e3d..dbf39844bf3 --- a/prelude/apple/xcode_postbuild_script.bzl +++ b/prelude/erlang/toolchain/escript_trampoline.sh @@ -1,3 +1,4 @@ +#! /usr/bin/env bash # Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under both the MIT license found in the @@ -5,5 +6,6 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -def xcode_postbuild_script_impl(_ctx: AnalysisContext) -> list[Provider]: - return [DefaultInfo()] +cmd=("$2" "$1/run.escript" "${@:3}") + +"${cmd[@]}" diff --git a/prelude/erlang/toolchain/json.erl b/prelude/erlang/toolchain/json.erl new file mode 100644 index 00000000000..e2f704cd2fc --- /dev/null +++ b/prelude/erlang/toolchain/json.erl @@ -0,0 +1,1632 @@ +%% A temporary import of OTP 27 json.erl with embedded json.hrl and +%% stripped doc attributes to make json encoding and decoding +%% available in buck2 rules. +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2024-2024. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+%% See the License for the specific language governing permissions and +%% limitations under the License. +%% +%% %CopyrightEnd% +%% % @format +%% +-module(json). + +-dialyzer(no_improper_lists). + +-export([ + encode/1, encode/2, + encode_value/2, + encode_atom/2, + encode_integer/1, + encode_float/1, + encode_list/2, + encode_map/2, + encode_map_checked/2, + encode_key_value_list/2, + encode_key_value_list_checked/2, + encode_binary/1, + encode_binary_escape_all/1 +]). +-export_type([encoder/0, encode_value/0]). + +-export([ + decode/1, decode/3, decode_start/3, decode_continue/2 +]). +-export_type([ + from_binary_fun/0, + array_start_fun/0, + array_push_fun/0, + array_finish_fun/0, + object_start_fun/0, + object_push_fun/0, + object_finish_fun/0, + decoders/0, + decode_value/0, + continuation_state/0 +]). + +-compile(warn_missing_spec). + +-compile( + {inline, [ + encode_atom/2, + encode_integer/1, + encode_float/1, + encode_object/1, + escape/1, + escape_binary/1, + escape_all/1, + utf8t/0, + utf8s/0, + utf8s0/0, + hex_to_int/4, + string/6 + ]} +). + +%% +%% %CopyrightBegin% +%% +%% Copyright Ericsson AB 2024. All Rights Reserved. +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +%% See the License for the specific language governing permissions and +%% limitations under the License. +%% +%% %CopyrightEnd% +%% + +%% A lot of the macros below use multi-value comparisons where +%% range checks would have worked just fine. This is because +%% the compiler & JIT can emit better code in some cases when +%% multiple clauses are to be dispatched based on such sets +%% of values. They'll generate an efficient "jump table", +%% which gets to the correct clause in one go, rather +%% than going through a set of comparisons. +%% However, this might not always be the best way (see is_0_to_9), +%% so as always with any performance work - measure, don't guess! + +-define(is_1_to_9(X), + X =:= $1 orelse + X =:= $2 orelse + X =:= $3 orelse + X =:= $4 orelse + X =:= $5 orelse + X =:= $6 orelse + X =:= $7 orelse + X =:= $8 orelse + X =:= $9 +). + +-define(is_0_to_9(X), X >= $0 andalso X =< $9). + +-define(is_ws(X), X =:= $\s; X =:= $\t; X =:= $\r; X =:= $\n). + +-define(is_ascii_escape(Byte), + Byte =:= 0 orelse + Byte =:= 1 orelse + Byte =:= 2 orelse + Byte =:= 3 orelse + Byte =:= 4 orelse + Byte =:= 5 orelse + Byte =:= 6 orelse + Byte =:= 7 orelse + Byte =:= 8 orelse + Byte =:= 9 orelse + Byte =:= 10 orelse + Byte =:= 11 orelse + Byte =:= 12 orelse + Byte =:= 13 orelse + Byte =:= 14 orelse + Byte =:= 15 orelse + Byte =:= 16 orelse + Byte =:= 17 orelse + Byte =:= 18 orelse + Byte =:= 19 orelse + Byte =:= 20 orelse + Byte =:= 21 orelse + Byte =:= 22 orelse + Byte =:= 23 orelse + Byte =:= 24 orelse + Byte =:= 25 orelse + Byte =:= 26 orelse + Byte =:= 27 orelse + Byte =:= 28 orelse + Byte =:= 29 orelse + Byte =:= 30 orelse + Byte =:= 31 orelse + Byte =:= 34 orelse + Byte =:= 92 +). 
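%% Illustrative note (not part of the vendored source): these guard
%% macros expand to ordinary guard expressions, e.g. a hypothetical
%%     needs_escape(B) when ?is_ascii_escape(B) -> true;
%%     needs_escape(_) -> false.
%% Spelling every byte out as an =:= alternative lets the compiler and
%% JIT emit a single jump table for the dispatch, per the note above.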
+-define(is_ascii_plain(Byte), + Byte =:= 32 orelse + Byte =:= 33 orelse + Byte =:= 35 orelse + Byte =:= 36 orelse + Byte =:= 37 orelse + Byte =:= 38 orelse + Byte =:= 39 orelse + Byte =:= 40 orelse + Byte =:= 41 orelse + Byte =:= 42 orelse + Byte =:= 43 orelse + Byte =:= 44 orelse + Byte =:= 45 orelse + Byte =:= 46 orelse + Byte =:= 47 orelse + Byte =:= 48 orelse + Byte =:= 49 orelse + Byte =:= 50 orelse + Byte =:= 51 orelse + Byte =:= 52 orelse + Byte =:= 53 orelse + Byte =:= 54 orelse + Byte =:= 55 orelse + Byte =:= 56 orelse + Byte =:= 57 orelse + Byte =:= 58 orelse + Byte =:= 59 orelse + Byte =:= 60 orelse + Byte =:= 61 orelse + Byte =:= 62 orelse + Byte =:= 63 orelse + Byte =:= 64 orelse + Byte =:= 65 orelse + Byte =:= 66 orelse + Byte =:= 67 orelse + Byte =:= 68 orelse + Byte =:= 69 orelse + Byte =:= 70 orelse + Byte =:= 71 orelse + Byte =:= 72 orelse + Byte =:= 73 orelse + Byte =:= 74 orelse + Byte =:= 75 orelse + Byte =:= 76 orelse + Byte =:= 77 orelse + Byte =:= 78 orelse + Byte =:= 79 orelse + Byte =:= 80 orelse + Byte =:= 81 orelse + Byte =:= 82 orelse + Byte =:= 83 orelse + Byte =:= 84 orelse + Byte =:= 85 orelse + Byte =:= 86 orelse + Byte =:= 87 orelse + Byte =:= 88 orelse + Byte =:= 89 orelse + Byte =:= 90 orelse + Byte =:= 91 orelse + Byte =:= 93 orelse + Byte =:= 94 orelse + Byte =:= 95 orelse + Byte =:= 96 orelse + Byte =:= 97 orelse + Byte =:= 98 orelse + Byte =:= 99 orelse + Byte =:= 100 orelse + Byte =:= 101 orelse + Byte =:= 102 orelse + Byte =:= 103 orelse + Byte =:= 104 orelse + Byte =:= 105 orelse + Byte =:= 106 orelse + Byte =:= 107 orelse + Byte =:= 108 orelse + Byte =:= 109 orelse + Byte =:= 110 orelse + Byte =:= 111 orelse + Byte =:= 112 orelse + Byte =:= 113 orelse + Byte =:= 114 orelse + Byte =:= 115 orelse + Byte =:= 116 orelse + Byte =:= 117 orelse + Byte =:= 118 orelse + Byte =:= 119 orelse + Byte =:= 120 orelse + Byte =:= 121 orelse + Byte =:= 122 orelse + Byte =:= 123 orelse + Byte =:= 124 orelse + Byte =:= 125 orelse + Byte =:= 126 orelse + Byte =:= 127 +). + +-define(are_all_ascii_plain(B1, B2, B3, B4, B5, B6, B7, B8), + (?is_ascii_plain(B1)) andalso + (?is_ascii_plain(B2)) andalso + (?is_ascii_plain(B3)) andalso + (?is_ascii_plain(B4)) andalso + (?is_ascii_plain(B5)) andalso + (?is_ascii_plain(B6)) andalso + (?is_ascii_plain(B7)) andalso + (?is_ascii_plain(B8)) +). + +-define(UTF8_ACCEPT, 0). +-define(UTF8_REJECT, 12). + +%% +%% Encoding implementation +%% + +-type encoder() :: fun((dynamic(), encoder()) -> iodata()). + +-type encode_value() :: + integer() + | float() + | boolean() + | null + | binary() + | atom() + | list(encode_value()) + | encode_map(encode_value()). + +-type encode_map(Value) :: #{binary() | atom() | integer() => Value}. + +-spec encode(encode_value()) -> iodata(). +encode(Term) -> encode(Term, fun do_encode/2). + +-spec encode(dynamic(), encoder()) -> iodata(). +encode(Term, Encoder) when is_function(Encoder, 2) -> + Encoder(Term, Encoder). + +-spec encode_value(dynamic(), encoder()) -> iodata(). +encode_value(Value, Encode) -> + do_encode(Value, Encode). + +-spec do_encode(dynamic(), encoder()) -> iodata(). 
+do_encode(Value, Encode) when is_atom(Value) -> + encode_atom(Value, Encode); +do_encode(Value, _Encode) when is_binary(Value) -> + escape_binary(Value); +do_encode(Value, _Encode) when is_integer(Value) -> + encode_integer(Value); +do_encode(Value, _Encode) when is_float(Value) -> + encode_float(Value); +do_encode(Value, Encode) when is_list(Value) -> + do_encode_list(Value, Encode); +do_encode(Value, Encode) when is_map(Value) -> + do_encode_map(Value, Encode); +do_encode(Other, _Encode) -> + error({unsupported_type, Other}). + +-spec encode_atom(atom(), encoder()) -> iodata(). +encode_atom(null, _Encode) -> <<"null">>; +encode_atom(true, _Encode) -> <<"true">>; +encode_atom(false, _Encode) -> <<"false">>; +encode_atom(Other, Encode) -> Encode(atom_to_binary(Other, utf8), Encode). + +-spec encode_integer(integer()) -> iodata(). +encode_integer(Integer) -> integer_to_binary(Integer). + +-spec encode_float(float()) -> iodata(). +encode_float(Float) -> float_to_binary(Float, [short]). + +-spec encode_list(list(), encoder()) -> iodata(). +encode_list(List, Encode) when is_list(List) -> + do_encode_list(List, Encode). + +do_encode_list([], _Encode) -> + <<"[]">>; +do_encode_list([First | Rest], Encode) when is_function(Encode, 2) -> + [$[, Encode(First, Encode) | list_loop(Rest, Encode)]. + +list_loop([], _Encode) -> "]"; +list_loop([Elem | Rest], Encode) -> [$,, Encode(Elem, Encode) | list_loop(Rest, Encode)]. + +-spec encode_map(encode_map(dynamic()), encoder()) -> iodata(). +encode_map(Map, Encode) when is_map(Map) -> + do_encode_map(Map, Encode). + +do_encode_map(Map, Encode) when is_function(Encode, 2) -> + encode_object([[$,, key(Key, Encode), $: | Encode(Value, Encode)] || Key := Value <- Map]). + +-spec encode_map_checked(map(), encoder()) -> iodata(). +encode_map_checked(Map, Encode) -> + do_encode_checked(maps:to_list(Map), Encode). + +-spec encode_key_value_list([{term(), term()}], encoder()) -> iodata(). +encode_key_value_list(List, Encode) when is_function(Encode, 2) -> + encode_object([[$,, key(Key, Encode), $: | Encode(Value, Encode)] || {Key, Value} <- List]). + +-spec encode_key_value_list_checked([{term(), term()}], encoder()) -> iodata(). +encode_key_value_list_checked(List, Encode) -> + do_encode_checked(List, Encode). + +do_encode_checked(List, Encode) when is_function(Encode, 2) -> + do_encode_checked(List, Encode, #{}). + +do_encode_checked([{Key, Value} | Rest], Encode, Visited0) -> + EncodedKey = iolist_to_binary(key(Key, Encode)), + case is_map_key(EncodedKey, Visited0) of + true -> + error({duplicate_key, Key}); + _ -> + Visited = Visited0#{EncodedKey => true}, + [$,, EncodedKey, $:, Encode(Value, Encode) | do_encode_checked(Rest, Encode, Visited)] + end; +do_encode_checked([], _, _) -> + []. + +%% Dispatching any value through `Encode` could allow incorrect +%% JSON to be emitted (with keys not being strings). To avoid this, +%% the default encoder only supports binaries, atoms, and numbers. +%% Customisation is possible by overriding how maps are encoded in general. +key(Key, Encode) when is_binary(Key) -> Encode(Key, Encode); +key(Key, Encode) when is_atom(Key) -> Encode(atom_to_binary(Key, utf8), Encode); +key(Key, _Encode) when is_integer(Key) -> [$", encode_integer(Key), $"]; +key(Key, _Encode) when is_float(Key) -> [$", encode_float(Key), $"]. + +encode_object([]) -> <<"{}">>; +encode_object([[_Comma | Entry] | Rest]) -> ["{", Entry, Rest, "}"]. + +-spec encode_binary(binary()) -> iodata(). +encode_binary(Bin) when is_binary(Bin) -> + escape_binary(Bin). 
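+
+%% Usage sketch (illustrative; output shown as printed by the shell):
+%%
+%%   1> iolist_to_binary(json:encode(#{hello => [1, null, <<"x">>]})).
+%%   <<"{\"hello\":[1,null,\"x\"]}">>
+%%
+%% A custom encoder can handle terms the default encoder rejects (such
+%% as tuples) and delegate everything else back to encode_value/2:
+%%
+%%   2> Enc = fun(T, E) when is_tuple(T) -> json:encode_list(tuple_to_list(T), E);
+%%              (V, E) -> json:encode_value(V, E)
+%%           end,
+%%      iolist_to_binary(json:encode({1, 2, 3}, Enc)).
+%%   <<"[1,2,3]">>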
+
+-spec encode_binary_escape_all(binary()) -> iodata().
+encode_binary_escape_all(Bin) when is_binary(Bin) ->
+    escape_all(Bin).
+
+escape_binary(Bin) -> escape_binary_ascii(Bin, [$"], Bin, 0, 0).
+
+escape_binary_ascii(Binary, Acc, Orig, Skip, Len) ->
+    case Binary of
+        <<B1, B2, B3, B4, B5, B6, B7, B8, Rest/bits>> when ?are_all_ascii_plain(B1, B2, B3, B4, B5, B6, B7, B8) ->
+            escape_binary_ascii(Rest, Acc, Orig, Skip, Len + 8);
+        Other ->
+            escape_binary(Other, Acc, Orig, Skip, Len)
+    end.
+
+escape_binary(<<Byte, Rest/bits>>, Acc, Orig, Skip, Len) when ?is_ascii_plain(Byte) ->
+    %% we got here because there were either less than 8 bytes left
+    %% or we have an escape in the next 8 bytes,
+    %% escape_binary_ascii would fail and dispatch here anyway
+    escape_binary(Rest, Acc, Orig, Skip, Len + 1);
+escape_binary(<<Byte, Rest/bits>>, Acc, Orig, Skip0, Len) when ?is_ascii_escape(Byte) ->
+    Escape = escape(Byte),
+    Skip = Skip0 + Len + 1,
+    case Len of
+        0 ->
+            escape_binary_ascii(Rest, [Acc | Escape], Orig, Skip, 0);
+        _ ->
+            Part = binary_part(Orig, Skip0, Len),
+            escape_binary_ascii(Rest, [Acc, Part | Escape], Orig, Skip, 0)
+    end;
+escape_binary(<<Byte, Rest/bits>>, Acc, Orig, Skip, Len) ->
+    case element(Byte - 127, utf8s0()) of
+        ?UTF8_REJECT -> invalid_byte(Orig, Skip + Len);
+        %% all accept cases are ASCII, already covered above
+        State -> escape_binary_utf8(Rest, Acc, Orig, Skip, Len, State)
+    end;
+escape_binary(_, _Acc, Orig, 0, _Len) ->
+    [$", Orig, $"];
+escape_binary(_, Acc, _Orig, _Skip, 0) ->
+    [Acc, $"];
+escape_binary(_, Acc, Orig, Skip, Len) ->
+    Part = binary_part(Orig, Skip, Len),
+    [Acc, Part, $"].
+
+escape_binary_utf8(<<Byte, Rest/bits>>, Acc, Orig, Skip, Len, State0) ->
+    Type = element(Byte + 1, utf8t()),
+    case element(State0 + Type, utf8s()) of
+        ?UTF8_ACCEPT -> escape_binary_ascii(Rest, Acc, Orig, Skip, Len + 2);
+        ?UTF8_REJECT -> invalid_byte(Orig, Skip + Len + 1);
+        State -> escape_binary_utf8(Rest, Acc, Orig, Skip, Len + 1, State)
+    end;
+escape_binary_utf8(_, _Acc, Orig, Skip, Len, _State) ->
+    unexpected_utf8(Orig, Skip + Len + 1).
+
+escape_all(Bin) -> escape_all_ascii(Bin, [$"], Bin, 0, 0).
+
+escape_all_ascii(Binary, Acc, Orig, Skip, Len) ->
+    case Binary of
+        <<B1, B2, B3, B4, B5, B6, B7, B8, Rest/bits>> when ?are_all_ascii_plain(B1, B2, B3, B4, B5, B6, B7, B8) ->
+            escape_all_ascii(Rest, Acc, Orig, Skip, Len + 8);
+        Other ->
+            escape_all(Other, Acc, Orig, Skip, Len)
+    end.
+
+escape_all(<<Byte, Rest/bits>>, Acc, Orig, Skip, Len) when ?is_ascii_plain(Byte) ->
+    escape_all(Rest, Acc, Orig, Skip, Len + 1);
+escape_all(<<Byte, Rest/bits>>, Acc, Orig, Skip, Len) when ?is_ascii_escape(Byte) ->
+    Escape = escape(Byte),
+    case Len of
+        0 ->
+            escape_all(Rest, [Acc | Escape], Orig, Skip + 1, 0);
+        _ ->
+            Part = binary_part(Orig, Skip, Len),
+            escape_all(Rest, [Acc, Part | Escape], Orig, Skip + Len + 1, 0)
+    end;
+escape_all(<<Char/utf8, Rest/bits>>, Acc, Orig, Skip, 0) ->
+    escape_char(Rest, Acc, Orig, Skip, Char);
+escape_all(<<Char/utf8, Rest/bits>>, Acc, Orig, Skip, Len) ->
+    Part = binary_part(Orig, Skip, Len),
+    escape_char(Rest, [Acc | Part], Orig, Skip + Len, Char);
+escape_all(<<>>, _Acc, Orig, 0, _Len) ->
+    [$", Orig, $"];
+escape_all(<<>>, Acc, _Orig, _Skip, 0) ->
+    [Acc, $"];
+escape_all(<<>>, Acc, Orig, Skip, Len) ->
+    Part = binary_part(Orig, Skip, Len),
+    [Acc, Part, $"];
+escape_all(_Other, _Acc, Orig, Skip, Len) ->
+    invalid_byte(Orig, Skip + Len).
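+
+%% Escaping sketch (illustrative): escape_binary/1 passes valid UTF-8
+%% through untouched and only escapes control characters, `"` and `\`,
+%% while escape_all/1 additionally \u-escapes every non-ASCII codepoint:
+%%
+%%   1> iolist_to_binary(json:encode_binary(<<"a\"b">>)).
+%%   <<"\"a\\\"b\"">>
+%%   2> iolist_to_binary(json:encode_binary_escape_all(<<"π"/utf8>>)).
+%%   <<"\"\\u03C0\"">>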
+
+escape_char(<<Rest/bits>>, Acc, Orig, Skip, Char) when Char =< 16#FF ->
+    Acc1 = [Acc, "\\u00" | integer_to_binary(Char, 16)],
+    escape_all(Rest, Acc1, Orig, Skip + 2, 0);
+escape_char(<<Rest/bits>>, Acc, Orig, Skip, Char) when Char =< 16#7FF ->
+    Acc1 = [Acc, "\\u0" | integer_to_binary(Char, 16)],
+    escape_all(Rest, Acc1, Orig, Skip + 2, 0);
+escape_char(<<Rest/bits>>, Acc, Orig, Skip, Char) when Char =< 16#FFF ->
+    Acc1 = [Acc, "\\u0" | integer_to_binary(Char, 16)],
+    escape_all(Rest, Acc1, Orig, Skip + 3, 0);
+escape_char(<<Rest/bits>>, Acc, Orig, Skip, Char) when Char =< 16#FFFF ->
+    Acc1 = [Acc, "\\u" | integer_to_binary(Char, 16)],
+    escape_all(Rest, Acc1, Orig, Skip + 3, 0);
+escape_char(<<Rest/bits>>, Acc, Orig, Skip, Char0) ->
+    Char = Char0 - 16#10000,
+    First = integer_to_binary(16#800 bor (Char bsr 10), 16),
+    Second = integer_to_binary(16#C00 bor (Char band 16#3FF), 16),
+    Acc1 = [Acc, "\\uD", First, "\\uD" | Second],
+    escape_all(Rest, Acc1, Orig, Skip + 4, 0).
+
+-spec escape(byte()) -> binary() | no.
+escape($\x00) -> <<"\\u0000">>;
+escape($\x01) -> <<"\\u0001">>;
+escape($\x02) -> <<"\\u0002">>;
+escape($\x03) -> <<"\\u0003">>;
+escape($\x04) -> <<"\\u0004">>;
+escape($\x05) -> <<"\\u0005">>;
+escape($\x06) -> <<"\\u0006">>;
+escape($\x07) -> <<"\\u0007">>;
+escape($\b) -> <<"\\b">>;
+escape($\t) -> <<"\\t">>;
+escape($\n) -> <<"\\n">>;
+escape($\x0b) -> <<"\\u000B">>;
+escape($\f) -> <<"\\f">>;
+escape($\r) -> <<"\\r">>;
+escape($\x0e) -> <<"\\u000E">>;
+escape($\x0f) -> <<"\\u000F">>;
+escape($\x10) -> <<"\\u0010">>;
+escape($\x11) -> <<"\\u0011">>;
+escape($\x12) -> <<"\\u0012">>;
+escape($\x13) -> <<"\\u0013">>;
+escape($\x14) -> <<"\\u0014">>;
+escape($\x15) -> <<"\\u0015">>;
+escape($\x16) -> <<"\\u0016">>;
+escape($\x17) -> <<"\\u0017">>;
+escape($\x18) -> <<"\\u0018">>;
+escape($\x19) -> <<"\\u0019">>;
+escape($\x1A) -> <<"\\u001A">>;
+escape($\x1B) -> <<"\\u001B">>;
+escape($\x1C) -> <<"\\u001C">>;
+escape($\x1D) -> <<"\\u001D">>;
+escape($\x1E) -> <<"\\u001E">>;
+escape($\x1F) -> <<"\\u001F">>;
+escape($") -> <<"\\\"">>;
+escape($\\) -> <<"\\\\">>;
+escape(_) -> no.
+
+%% This is an adapted table from "Flexible and Economical UTF-8 Decoding" by Bjoern Hoehrmann.
+%% http://bjoern.hoehrmann.de/utf-8/decoder/dfa/ + +%% Map character to character class +utf8t() -> + { + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 9, + 9, + 9, + 9, + 9, + 9, + 9, + 9, + 9, + 9, + 9, + 9, + 9, + 9, + 9, + 9, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 7, + 8, + 8, + 2, + 2, + 2, + 2, + 2, + 2, + 2, + 2, + 2, + 2, + 2, + 2, + 2, + 2, + 2, + 2, + 2, + 2, + 2, + 2, + 2, + 2, + 2, + 2, + 2, + 2, + 2, + 2, + 2, + 2, + 10, + 3, + 3, + 3, + 3, + 3, + 3, + 3, + 3, + 3, + 3, + 3, + 3, + 4, + 3, + 3, + 11, + 6, + 6, + 6, + 5, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8, + 8 + }. + +%% Transition table mapping combination of state & class to next state +utf8s() -> + { + 12, + 24, + 36, + 60, + 96, + 84, + 12, + 12, + 12, + 48, + 72, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 0, + 12, + 12, + 12, + 12, + 12, + 0, + 12, + 0, + 12, + 12, + 12, + 24, + 12, + 12, + 12, + 12, + 12, + 24, + 12, + 24, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 24, + 12, + 12, + 12, + 12, + 12, + 24, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 24, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 36, + 12, + 36, + 12, + 12, + 12, + 36, + 12, + 12, + 12, + 12, + 12, + 36, + 12, + 36, + 12, + 12, + 12, + 36, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12 + }. + +%% Optimisation for 1st byte direct state lookup, +%% we know starting state is 0 and ASCII bytes were already handled +utf8s0() -> + { + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 24, + 24, + 24, + 24, + 24, + 24, + 24, + 24, + 24, + 24, + 24, + 24, + 24, + 24, + 24, + 24, + 24, + 24, + 24, + 24, + 24, + 24, + 24, + 24, + 24, + 24, + 24, + 24, + 24, + 24, + 48, + 36, + 36, + 36, + 36, + 36, + 36, + 36, + 36, + 36, + 36, + 36, + 36, + 60, + 36, + 36, + 72, + 84, + 84, + 84, + 96, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12, + 12 + }. + +invalid_byte(Bin, Skip) -> + Byte = binary:at(Bin, Skip), + error({invalid_byte, Byte}, none, error_info(Skip)). + +error_info(Skip) -> + [{error_info, #{cause => #{position => Skip}}}]. + +%% +%% Decoding implementation +%% + +-define(ARRAY, array). +-define(OBJECT, object). + +-type from_binary_fun() :: fun((binary()) -> dynamic()). +-type array_start_fun() :: fun((Acc :: dynamic()) -> ArrayAcc :: dynamic()). +-type array_push_fun() :: fun((Value :: dynamic(), Acc :: dynamic()) -> NewAcc :: dynamic()). 
+-type array_finish_fun() :: fun((ArrayAcc :: dynamic(), OldAcc :: dynamic()) -> {dynamic(), dynamic()}). +-type object_start_fun() :: fun((Acc :: dynamic()) -> ObjectAcc :: dynamic()). +-type object_push_fun() :: fun((Key :: dynamic(), Value :: dynamic(), Acc :: dynamic()) -> NewAcc :: dynamic()). +-type object_finish_fun() :: fun((ObjectAcc :: dynamic(), OldAcc :: dynamic()) -> {dynamic(), dynamic()}). + +-type decoders() :: #{ + array_start => array_start_fun(), + array_push => array_push_fun(), + array_finish => array_finish_fun(), + object_start => object_start_fun(), + object_push => object_push_fun(), + object_finish => object_finish_fun(), + float => from_binary_fun(), + integer => from_binary_fun(), + string => from_binary_fun(), + null => term() +}. + +-record(decode, { + array_start :: array_start_fun() | undefined, + array_push :: array_push_fun() | undefined, + array_finish :: array_finish_fun() | undefined, + object_start :: object_start_fun() | undefined, + object_push :: object_push_fun() | undefined, + object_finish :: object_finish_fun() | undefined, + float = fun erlang:binary_to_float/1 :: from_binary_fun(), + integer = fun erlang:binary_to_integer/1 :: from_binary_fun(), + string :: from_binary_fun() | undefined, + null = null :: term() +}). + +-type acc() :: dynamic(). +-type stack() :: [?ARRAY | ?OBJECT | binary() | acc()]. +-type decode() :: #decode{}. + +-opaque continuation_state() :: tuple(). + +-type decode_value() :: + integer() + | float() + | boolean() + | null + | binary() + | list(decode_value()) + | #{binary() => decode_value()}. + +-spec decode(binary()) -> decode_value(). +decode(Binary) when is_binary(Binary) -> + case value(Binary, Binary, 0, ok, [], #decode{}) of + {Result, _Acc, <<>>} -> + Result; + {_, _, Rest} -> + invalid_byte(Rest, 0); + {continue, {_Bin, _Acc, [], _Decode, {number, Number}}} -> + Number; + {continue, {_, _, _, _, {float_error, Token, Skip}}} -> + unexpected_sequence(Token, Skip); + {continue, _} -> + error(unexpected_end) + end. + +-spec decode(binary(), dynamic(), decoders()) -> + {Result :: dynamic(), Acc :: dynamic(), binary()}. +decode(Binary, Acc0, Decoders) when is_binary(Binary) -> + Decode = maps:fold(fun parse_decoder/3, #decode{}, Decoders), + case value(Binary, Binary, 0, Acc0, [], Decode) of + {continue, {_Bin, Acc, [], _Decode, {number, Val}}} -> + {Val, Acc, <<>>}; + {continue, {_, _, _, _, {float_error, Token, Skip}}} -> + unexpected_sequence(Token, Skip); + {continue, _} -> + error(unexpected_end); + Result -> + Result + end. + +-spec decode_start(binary(), dynamic(), decoders()) -> + {Result :: dynamic(), Acc :: dynamic(), binary()} | {continue, continuation_state()}. +decode_start(Binary, Acc, Decoders) when is_binary(Binary) -> + Decode = maps:fold(fun parse_decoder/3, #decode{}, Decoders), + value(Binary, Binary, 0, Acc, [], Decode). + +-spec decode_continue(binary() | end_of_input, Opaque :: term()) -> + {Result :: dynamic(), Acc :: dynamic(), binary()} | {continue, continuation_state()}. 
+decode_continue(end_of_input, State) ->
+    case State of
+        {_, Acc, [], _Decode, {number, Val}} ->
+            {Val, Acc, <<>>};
+        {_, _, _, _, {float_error, Token, Skip}} ->
+            unexpected_sequence(Token, Skip);
+        _ ->
+            error(unexpected_end)
+    end;
+decode_continue(Cont, {Rest, Acc, Stack, #decode{} = Decode, FuncData}) when is_binary(Cont) ->
+    Binary = <<Rest/binary, Cont/binary>>,
+    case FuncData of
+        value ->
+            value(Binary, Binary, 0, Acc, Stack, Decode);
+        {number, _} ->
+            value(Binary, Binary, 0, Acc, Stack, Decode);
+        {float_error, _Token, _Skip} ->
+            value(Binary, Binary, 0, Acc, Stack, Decode);
+        {array_push, Val} ->
+            array_push(Binary, Binary, 0, Acc, Stack, Decode, Val);
+        {object_value, Key} ->
+            object_value(Binary, Binary, 0, Acc, Stack, Decode, Key);
+        {object_push, Value, Key} ->
+            object_push(Binary, Binary, 0, Acc, Stack, Decode, Value, Key);
+        object_key ->
+            object_key(Binary, Binary, 0, Acc, Stack, Decode)
+    end.
+
+parse_decoder(array_start, Fun, Decode) when is_function(Fun, 1) ->
+    Decode#decode{array_start = Fun};
+parse_decoder(array_push, Fun, Decode) when is_function(Fun, 2) ->
+    Decode#decode{array_push = Fun};
+parse_decoder(array_finish, Fun, Decode) when is_function(Fun, 2) ->
+    Decode#decode{array_finish = Fun};
+parse_decoder(object_start, Fun, Decode) when is_function(Fun, 1) ->
+    Decode#decode{object_start = Fun};
+parse_decoder(object_push, Fun, Decode) when is_function(Fun, 3) ->
+    Decode#decode{object_push = Fun};
+parse_decoder(object_finish, Fun, Decode) when is_function(Fun, 2) ->
+    Decode#decode{object_finish = Fun};
+parse_decoder(float, Fun, Decode) when is_function(Fun, 1) ->
+    Decode#decode{float = Fun};
+parse_decoder(integer, Fun, Decode) when is_function(Fun, 1) ->
+    Decode#decode{integer = Fun};
+parse_decoder(string, Fun, Decode) when is_function(Fun, 1) ->
+    Decode#decode{string = Fun};
+parse_decoder(null, Null, Decode) ->
+    Decode#decode{null = Null}.
+
+value(<<Byte, Rest/bits>>, Original, Skip, Acc, Stack, Decode) when ?is_ws(Byte) ->
+    value(Rest, Original, Skip + 1, Acc, Stack, Decode);
+value(<<$0, Rest/bits>>, Original, Skip, Acc, Stack, Decode) ->
+    number_zero(Rest, Original, Skip, Acc, Stack, Decode, 1);
+value(<<Byte, Rest/bits>>, Original, Skip, Acc, Stack, Decode) when ?is_1_to_9(Byte) ->
+    number(Rest, Original, Skip, Acc, Stack, Decode, 1);
+value(<<$-, Rest/bits>>, Original, Skip, Acc, Stack, Decode) ->
+    number_minus(Rest, Original, Skip, Acc, Stack, Decode);
+value(<<$t, Rest/bits>>, Original, Skip, Acc, Stack, Decode) ->
+    true(Rest, Original, Skip, Acc, Stack, Decode);
+value(<<$f, Rest/bits>>, Original, Skip, Acc, Stack, Decode) ->
+    false(Rest, Original, Skip, Acc, Stack, Decode);
+value(<<$n, Rest/bits>>, Original, Skip, Acc, Stack, Decode) ->
+    null(Rest, Original, Skip, Acc, Stack, Decode);
+value(<<$", Rest/bits>>, Original, Skip, Acc, Stack, Decode) ->
+    string(Rest, Original, Skip + 1, Acc, Stack, Decode);
+value(<<$[, Rest/bits>>, Original, Skip, Acc, Stack, Decode) ->
+    array_start(Rest, Original, Skip, Acc, Stack, Decode, 1);
+value(<<${, Rest/bits>>, Original, Skip, Acc, Stack, Decode) ->
+    object_start(Rest, Original, Skip, Acc, Stack, Decode, 1);
+value(<<Byte, _Rest/bits>>, Original, Skip, _Acc, _Stack, _Decode) when ?is_ascii_plain(Byte) ->
+    %% this clause is effectively the same as the last one, but necessary to
+    %% force compiler to emit a jump table dispatch, rather than binary search
+    invalid_byte(Original, Skip);
+value(_, Original, Skip, Acc, Stack, Decode) ->
+    unexpected(Original, Skip, Acc, Stack, Decode, 0, 0, value).
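+
+%% Decoding sketch (illustrative):
+%%
+%%   1> json:decode(<<"{\"a\":[1,2.5,null]}">>).
+%%   #{<<"a">> => [1,2.5,null]}
+%%
+%% decode_start/3 and decode_continue/2 accept input incrementally; a
+%% {continue, State} result means more bytes are needed:
+%%
+%%   2> {continue, State} = json:decode_start(<<"{\"a\":[1">>, ok, #{}),
+%%      json:decode_continue(<<",2]}">>, State).
+%%   {#{<<"a">> => [1,2]},ok,<<>>}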
+
+true(<<"rue", Rest/bits>>, Original, Skip, Acc, Stack, Decode) ->
+    continue(Rest, Original, Skip + 4, Acc, Stack, Decode, true);
+true(_Rest, Original, Skip, Acc, Stack, Decode) ->
+    unexpected(Original, Skip, Acc, Stack, Decode, 1, 3, value).
+
+false(<<"alse", Rest/bits>>, Original, Skip, Acc, Stack, Decode) ->
+    continue(Rest, Original, Skip + 5, Acc, Stack, Decode, false);
+false(_Rest, Original, Skip, Acc, Stack, Decode) ->
+    unexpected(Original, Skip, Acc, Stack, Decode, 1, 4, value).
+
+null(<<"ull", Rest/bits>>, Original, Skip, Acc, Stack, Decode) ->
+    continue(Rest, Original, Skip + 4, Acc, Stack, Decode, Decode#decode.null);
+null(_Rest, Original, Skip, Acc, Stack, Decode) ->
+    unexpected(Original, Skip, Acc, Stack, Decode, 1, 3, value).
+
+number_minus(<<$0, Rest/bits>>, Original, Skip, Acc, Stack, Decode) ->
+    number_zero(Rest, Original, Skip, Acc, Stack, Decode, 2);
+number_minus(<<Num, Rest/bits>>, Original, Skip, Acc, Stack, Decode) when ?is_1_to_9(Num) ->
+    number(Rest, Original, Skip, Acc, Stack, Decode, 2);
+number_minus(_Rest, Original, Skip, Acc, Stack, Decode) ->
+    unexpected(Original, Skip, Acc, Stack, Decode, 1, 0, value).
+
+number_zero(<<$., Rest/bits>>, Original, Skip, Acc, Stack, Decode, Len) ->
+    number_frac(Rest, Original, Skip, Acc, Stack, Decode, Len + 1);
+number_zero(<<E, Rest/bits>>, Original, Skip, Acc, Stack, Decode, Len) when E =:= $E; E =:= $e ->
+    number_exp_copy(Rest, Original, Skip, Acc, Stack, Decode, Len + 1, <<"0">>);
+number_zero(<<>>, Original, Skip, Acc, Stack, Decode, Len) ->
+    unexpected(Original, Skip, Acc, Stack, Decode, Len, 0, {number, 0});
+number_zero(Rest, Original, Skip, Acc, Stack, Decode, Len) ->
+    continue(Rest, Original, Skip + Len, Acc, Stack, Decode, 0).
+
+number(<<Num, Rest/bits>>, Original, Skip, Acc, Stack, Decode, Len) when ?is_0_to_9(Num) ->
+    number(Rest, Original, Skip, Acc, Stack, Decode, Len + 1);
+number(<<$., Rest/bits>>, Original, Skip, Acc, Stack, Decode, Len) ->
+    number_frac(Rest, Original, Skip, Acc, Stack, Decode, Len + 1);
+number(<<E, Rest/bits>>, Original, Skip, Acc, Stack, Decode, Len) when E =:= $E; E =:= $e ->
+    Prefix = binary_part(Original, Skip, Len),
+    number_exp_copy(Rest, Original, Skip, Acc, Stack, Decode, Len + 1, Prefix);
+number(<<>>, Original, Skip, Acc, Stack, Decode, Len) ->
+    Int = (Decode#decode.integer)(binary_part(Original, Skip, Len)),
+    unexpected(Original, Skip, Acc, Stack, Decode, Len, 0, {number, Int});
+number(Rest, Original, Skip, Acc, Stack, Decode, Len) ->
+    Int = (Decode#decode.integer)(binary_part(Original, Skip, Len)),
+    continue(Rest, Original, Skip + Len, Acc, Stack, Decode, Int).
+
+number_frac(<<Byte, Rest/bits>>, Original, Skip, Acc, Stack, Decode, Len) when ?is_0_to_9(Byte) ->
+    number_frac_cont(Rest, Original, Skip, Acc, Stack, Decode, Len + 1);
+number_frac(_, Original, Skip, Acc, Stack, Decode, Len) ->
+    unexpected(Original, Skip, Acc, Stack, Decode, Len, 0, value).
+
+number_frac_cont(<<Byte, Rest/bits>>, Original, Skip, Acc, Stack, Decode, Len) when ?is_0_to_9(Byte) ->
+    number_frac_cont(Rest, Original, Skip, Acc, Stack, Decode, Len + 1);
+number_frac_cont(<<E, Rest/bits>>, Original, Skip, Acc, Stack, Decode, Len) when E =:= $e; E =:= $E ->
+    number_exp(Rest, Original, Skip, Acc, Stack, Decode, Len + 1);
+number_frac_cont(Rest, Original, Skip, Acc, Stack, Decode, Len) ->
+    Token = binary_part(Original, Skip, Len),
+    float_decode(Rest, Original, Skip, Acc, Stack, Decode, Len, Token).
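+
+%% Number-decoding sketch (illustrative): the `integer` and `float`
+%% callbacks receive the raw token, so callers can avoid lossy
+%% conversions, e.g. by keeping the decimal text:
+%%
+%%   1> json:decode(<<"[1.25]">>, ok, #{float => fun(Bin) -> {raw, Bin} end}).
+%%   {[{raw,<<"1.25">>}],ok,<<>>}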
+
+float_decode(<<>>, Original, Skip, Acc, Stack, Decode, Len, Token) ->
+    try (Decode#decode.float)(Token) of
+        Float -> unexpected(Original, Skip, Acc, Stack, Decode, Len, 0, {number, Float})
+    catch
+        _:_ -> unexpected(Original, Skip, Acc, Stack, Decode, Len, 0, {float_error, Token, Skip})
+    end;
+float_decode(<<Rest/bits>>, Original, Skip, Acc, Stack, Decode, Len, Token) ->
+    try (Decode#decode.float)(Token) of
+        Float ->
+            continue(Rest, Original, Skip + Len, Acc, Stack, Decode, Float)
+    catch
+        _:_ -> unexpected_sequence(Token, Skip)
+    end.
+
+number_exp(<<Byte, Rest/bits>>, Original, Skip, Acc, Stack, Decode, Len) when ?is_0_to_9(Byte) ->
+    number_exp_cont(Rest, Original, Skip, Acc, Stack, Decode, Len + 1);
+number_exp(<<Sign, Rest/bits>>, Original, Skip, Acc, Stack, Decode, Len) when Sign =:= $+; Sign =:= $- ->
+    number_exp_sign(Rest, Original, Skip, Acc, Stack, Decode, Len + 1);
+number_exp(_, Original, Skip, Acc, Stack, Decode, Len) ->
+    unexpected(Original, Skip, Acc, Stack, Decode, Len, 0, value).
+
+number_exp_sign(<<Byte, Rest/bits>>, Original, Skip, Acc, Stack, Decode, Len) when ?is_0_to_9(Byte) ->
+    number_exp_cont(Rest, Original, Skip, Acc, Stack, Decode, Len + 1);
+number_exp_sign(_, Original, Skip, Acc, Stack, Decode, Len) ->
+    unexpected(Original, Skip, Acc, Stack, Decode, Len, 0, value).
+
+number_exp_cont(<<Byte, Rest/bits>>, Original, Skip, Acc, Stack, Decode, Len) when ?is_0_to_9(Byte) ->
+    number_exp_cont(Rest, Original, Skip, Acc, Stack, Decode, Len + 1);
+number_exp_cont(Rest, Original, Skip, Acc, Stack, Decode, Len) ->
+    Token = binary_part(Original, Skip, Len),
+    float_decode(Rest, Original, Skip, Acc, Stack, Decode, Len, Token).
+
+number_exp_copy(<<Byte, Rest/bits>>, Original, Skip, Acc, Stack, Decode, Len, Prefix) when ?is_0_to_9(Byte) ->
+    number_exp_cont(Rest, Original, Skip, Acc, Stack, Decode, Len, Prefix, 1);
+number_exp_copy(<<Sign, Rest/bits>>, Original, Skip, Acc, Stack, Decode, Len, Prefix) when Sign =:= $+; Sign =:= $- ->
+    number_exp_sign(Rest, Original, Skip, Acc, Stack, Decode, Len, Prefix, 1);
+number_exp_copy(_, Original, Skip, Acc, Stack, Decode, Len, _Prefix) ->
+    unexpected(Original, Skip, Acc, Stack, Decode, Len, 0, value).
+
+number_exp_sign(<<Byte, Rest/bits>>, Original, Skip, Acc, Stack, Decode, Len, Prefix, ExpLen) when ?is_0_to_9(Byte) ->
+    number_exp_cont(Rest, Original, Skip, Acc, Stack, Decode, Len, Prefix, ExpLen + 1);
+number_exp_sign(_, Original, Skip, Acc, Stack, Decode, Len, _Prefix, ExpLen) ->
+    unexpected(Original, Skip, Acc, Stack, Decode, Len + ExpLen, 0, value).
+
+number_exp_cont(<<Byte, Rest/bits>>, Original, Skip, Acc, Stack, Decode, Len, Prefix, ExpLen) when ?is_0_to_9(Byte) ->
+    number_exp_cont(Rest, Original, Skip, Acc, Stack, Decode, Len, Prefix, ExpLen + 1);
+number_exp_cont(Rest, Original, Skip, Acc, Stack, Decode, Len, Prefix, ExpLen) ->
+    Suffix = binary_part(Original, Skip + Len, ExpLen),
+    Token = <<Prefix/binary, ".0e", Suffix/binary>>,
+    float_decode(Rest, Original, Skip, Acc, Stack, Decode, Len + ExpLen, Token).
+
+string(Binary, Original, Skip, Acc, Stack, Decode) ->
+    string_ascii(Binary, Original, Skip, Acc, Stack, Decode, 0).
+
+string_ascii(Binary, Original, Skip, Acc, Stack, Decode, Len) ->
+    case Binary of
+        <<B1, B2, B3, B4, B5, B6, B7, B8, Rest/bits>> when ?are_all_ascii_plain(B1, B2, B3, B4, B5, B6, B7, B8) ->
+            string_ascii(Rest, Original, Skip, Acc, Stack, Decode, Len + 8);
+        Other ->
+            string(Other, Original, Skip, Acc, Stack, Decode, Len)
+    end.
+
+-spec string(binary(), binary(), integer(), acc(), stack(), decode(), integer()) -> dynamic().
+string(<<Byte, Rest/bits>>, Orig, Skip, Acc, Stack, Decode, Len) when ?is_ascii_plain(Byte) ->
+    string(Rest, Orig, Skip, Acc, Stack, Decode, Len + 1);
+string(<<$\\, Rest/bits>>, Orig, Skip, Acc, Stack, Decode, Len) ->
+    Part = binary_part(Orig, Skip, Len),
+    SAcc = <<>>,
+    unescape(Rest, Orig, Skip, Acc, Stack, Decode, Skip - 1, Len, <<SAcc/binary, Part/binary>>);
+string(<<$", Rest/bits>>, Orig, Skip0, Acc, Stack, Decode, Len) ->
+    Value = binary_part(Orig, Skip0, Len),
+    Skip = Skip0 + Len + 1,
+    case Decode#decode.string of
+        undefined -> continue(Rest, Orig, Skip, Acc, Stack, Decode, Value);
+        Fun -> continue(Rest, Orig, Skip, Acc, Stack, Decode, Fun(Value))
+    end;
+string(<<Byte, _Rest/bits>>, Orig, Skip, _Acc, _Stack, _Decode, Len) when ?is_ascii_escape(Byte) ->
+    invalid_byte(Orig, Skip + Len);
+string(<<Byte, Rest/bits>>, Orig, Skip, Acc, Stack, Decode, Len) ->
+    case element(Byte - 127, utf8s0()) of
+        ?UTF8_REJECT -> invalid_byte(Orig, Skip + Len);
+        %% all accept cases are ASCII, already covered above
+        State -> string_utf8(Rest, Orig, Skip, Acc, Stack, Decode, Len, State)
+    end;
+string(_, Orig, Skip, Acc, Stack, Decode, Len) ->
+    unexpected(Orig, Skip - 1, Acc, Stack, Decode, Len + 1, 0, value).
+
+string_utf8(<<Byte, Rest/bits>>, Orig, Skip, Acc, Stack, Decode, Len, State0) ->
+    Type = element(Byte + 1, utf8t()),
+    case element(State0 + Type, utf8s()) of
+        ?UTF8_ACCEPT -> string_ascii(Rest, Orig, Skip, Acc, Stack, Decode, Len + 2);
+        ?UTF8_REJECT -> invalid_byte(Orig, Skip + Len + 1);
+        State -> string_utf8(Rest, Orig, Skip, Acc, Stack, Decode, Len + 1, State)
+    end;
+string_utf8(_, Orig, Skip, Acc, Stack, Decode, Len, _State0) ->
+    unexpected(Orig, Skip - 1, Acc, Stack, Decode, Len + 2, 0, value).
+
+string_ascii(Binary, Original, Skip, Acc, Stack, Decode, Start, Len, SAcc) ->
+    case Binary of
+        <<B1, B2, B3, B4, B5, B6, B7, B8, Rest/bits>> when ?are_all_ascii_plain(B1, B2, B3, B4, B5, B6, B7, B8) ->
+            string_ascii(Rest, Original, Skip, Acc, Stack, Decode, Start, Len + 8, SAcc);
+        Other ->
+            string(Other, Original, Skip, Acc, Stack, Decode, Start, Len, SAcc)
+    end.
+
+-spec string(binary(), binary(), integer(), acc(), stack(), decode(), integer(), integer(), binary()) -> dynamic().
+string(<<Byte, Rest/bits>>, Orig, Skip, Acc, Stack, Decode, Start, Len, SAcc) when ?is_ascii_plain(Byte) ->
+    string(Rest, Orig, Skip, Acc, Stack, Decode, Start, Len + 1, SAcc);
+string(<<$\\, Rest/bits>>, Orig, Skip, Acc, Stack, Decode, Start, Len, SAcc) ->
+    Part = binary_part(Orig, Skip, Len),
+    unescape(Rest, Orig, Skip, Acc, Stack, Decode, Start, Len, <<SAcc/binary, Part/binary>>);
+string(<<$", Rest/bits>>, Orig, Skip0, Acc, Stack, Decode, _Start, Len, SAcc) ->
+    Part = binary_part(Orig, Skip0, Len),
+    Value = <<SAcc/binary, Part/binary>>,
+    Skip = Skip0 + Len + 1,
+    case Decode#decode.string of
+        undefined -> continue(Rest, Orig, Skip, Acc, Stack, Decode, Value);
+        Fun -> continue(Rest, Orig, Skip, Acc, Stack, Decode, Fun(Value))
+    end;
+string(<<Byte, _Rest/bits>>, Orig, Skip, _Acc, _Stack, _Decode, _Start, Len, _SAcc) when ?is_ascii_escape(Byte) ->
+    invalid_byte(Orig, Skip + Len);
+string(<<Byte, Rest/bits>>, Orig, Skip, Acc, Stack, Decode, Start, Len, SAcc) ->
+    case element(Byte - 127, utf8s0()) of
+        ?UTF8_REJECT -> invalid_byte(Orig, Skip + Len);
+        %% all accept cases are ASCII, already covered above
+        State -> string_utf8(Rest, Orig, Skip, Acc, Stack, Decode, Start, Len, SAcc, State)
+    end;
+string(_, Orig, Skip, Acc, Stack, Decode, Start, Len, _SAcc) ->
+    Extra = Skip - Start,
+    unexpected(Orig, Start, Acc, Stack, Decode, Len + Extra, 0, value).
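+
+%% String-decoding sketch (illustrative): the optional `string` callback
+%% post-processes every decoded string, including object keys:
+%%
+%%   1> json:decode(<<"[\"a\",\"b\"]">>, ok, #{string => fun erlang:binary_to_list/1}).
+%%   {["a","b"],ok,<<>>}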
+
+string_utf8(<<Byte, Rest/bits>>, Orig, Skip, Acc, Stack, Decode, Start, Len, SAcc, State0) ->
+    Type = element(Byte + 1, utf8t()),
+    case element(State0 + Type, utf8s()) of
+        ?UTF8_ACCEPT -> string_ascii(Rest, Orig, Skip, Acc, Stack, Decode, Start, Len + 2, SAcc);
+        ?UTF8_REJECT -> invalid_byte(Orig, Skip + Len + 1);
+        State -> string_utf8(Rest, Orig, Skip, Acc, Stack, Decode, Start, Len + 1, SAcc, State)
+    end;
+string_utf8(_, Orig, Skip, Acc, Stack, Decode, Start, Len, _SAcc, _State0) ->
+    Extra = Skip - Start,
+    unexpected(Orig, Start, Acc, Stack, Decode, Len + 1 + Extra, 0, value).
+
+unescape(<<Byte, Rest/bits>>, Original, Skip, Acc, Stack, Decode, Start, Len, SAcc) ->
+    Val =
+        case Byte of
+            $b -> $\b;
+            $f -> $\f;
+            $n -> $\n;
+            $r -> $\r;
+            $t -> $\t;
+            $" -> $";
+            $\\ -> $\\;
+            $/ -> $/;
+            $u -> unicode;
+            _ -> error
+        end,
+    case Val of
+        unicode -> unescapeu(Rest, Original, Skip, Acc, Stack, Decode, Start, Len, SAcc);
+        error -> invalid_byte(Original, Skip + Len + 1);
+        Int -> string_ascii(Rest, Original, Skip + Len + 2, Acc, Stack, Decode, Start, 0, <<SAcc/binary, Int>>)
+    end;
+unescape(_, Original, Skip, Acc, Stack, Decode, Start, Len, _SAcc) ->
+    Extra = Skip - Start,
+    unexpected(Original, Start, Acc, Stack, Decode, Len + 1 + Extra, 0, value).
+
+unescapeu(<<E1, E2, E3, E4, Rest/bits>>, Original, Skip, Acc, Stack, Decode, Start, Len, SAcc) ->
+    try hex_to_int(E1, E2, E3, E4) of
+        CP when CP >= 16#D800, CP =< 16#DBFF ->
+            unescape_surrogate(Rest, Original, Skip, Acc, Stack, Decode, Start, Len, SAcc, CP);
+        CP ->
+            try <<SAcc/binary, CP/utf8>> of
+                SAcc1 -> string_ascii(Rest, Original, Skip + Len + 6, Acc, Stack, Decode, Start, 0, SAcc1)
+            catch
+                _:_ -> unexpected_sequence(binary_part(Original, Skip + Len, 6), Skip + Len)
+            end
+    catch
+        _:_ ->
+            unexpected_sequence(binary_part(Original, Skip + Len, 6), Skip + Len)
+    end;
+unescapeu(_Rest, Original, Skip, Acc, Stack, Decode, Start, Len, _SAcc) ->
+    Extra = Skip - Start,
+    unexpected(Original, Start, Acc, Stack, Decode, Len + 2 + Extra, 4, value).
+
+unescape_surrogate(<<"\\u", E1, E2, E3, E4, Rest/bits>>, Original, Skip, Acc, Stack, Decode, Start, Len, SAcc, Hi) ->
+    try hex_to_int(E1, E2, E3, E4) of
+        Lo when Lo >= 16#DC00, Lo =< 16#DFFF ->
+            CP = 16#10000 + ((Hi band 16#3FF) bsl 10) + (Lo band 16#3FF),
+            try <<SAcc/binary, CP/utf8>> of
+                SAcc1 -> string_ascii(Rest, Original, Skip + Len + 12, Acc, Stack, Decode, Start, 0, SAcc1)
+            catch
+                _:_ -> unexpected_sequence(binary_part(Original, Skip + Len, 12), Skip + Len)
+            end;
+        _ ->
+            unexpected_sequence(binary_part(Original, Skip + Len, 12), Skip + Len)
+    catch
+        _:_ -> unexpected_sequence(binary_part(Original, Skip + Len, 12), Skip + Len)
+    end;
+unescape_surrogate(_Rest, Original, Skip, Acc, Stack, Decode, Start, Len, _SAcc, _Hi) ->
+    Extra = Skip - Start,
+    unexpected(Original, Start, Acc, Stack, Decode, Len + 6 + Extra, 5, value).
+
+%% erlfmt-ignore
+%% this is a macro instead of an inlined function - compiler refused to inline
+-define(hex_digit(C), element(C - $0 + 1, {
+    0, 1, 2, 3, 4, 5, 6, 7, 8, 9, n, n, n, n, n, %% 0x30
+    n, n, 10,11,12,13,14,15,n, n, n, n, n, n, n, %% 0x40
+    n, n, n, n, n, n, n, n, n, n, n, n, n, n, n, %% 0x50
+    n, n, n, n, 10,11,12,13,14,15                %% 0x60
+})).
+
+-spec hex_to_int(byte(), byte(), byte(), byte()) -> integer().
+hex_to_int(H1, H2, H3, H4) ->
+    ?hex_digit(H4) + 16 * (?hex_digit(H3) + 16 * (?hex_digit(H2) + 16 * ?hex_digit(H1))).
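+
+%% Escape-sequence sketch (illustrative): \uXXXX escapes, including
+%% UTF-16 surrogate pairs, decode back to UTF-8:
+%%
+%%   1> json:decode(<<"\"\\uD83D\\uDE00\"">>).
+%%   <<"😀"/utf8>>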
+
+array_start(<<Byte, Rest/bits>>, Original, Skip, Acc, Stack, Decode, Len) when ?is_ws(Byte) ->
+    array_start(Rest, Original, Skip, Acc, Stack, Decode, Len + 1);
+array_start(<<"]", Rest/bits>>, Original, Skip, Acc, Stack, Decode, Len) ->
+    {Value, NewAcc} =
+        case {Decode#decode.array_start, Decode#decode.array_finish} of
+            {undefined, undefined} -> {[], Acc};
+            {Start, undefined} -> {lists:reverse(Start(Acc)), Acc};
+            {undefined, Finish} -> Finish([], Acc);
+            {Start, Finish} -> Finish(Start(Acc), Acc)
+        end,
+    continue(Rest, Original, Skip + Len + 1, NewAcc, Stack, Decode, Value);
+array_start(<<>>, Original, Skip, Acc, Stack, Decode, Len) ->
+    %% Handles empty array [] in continuation mode
+    unexpected(Original, Skip, Acc, Stack, Decode, Len, 0, value);
+array_start(Rest, Original, Skip, OldAcc, Stack, Decode, Len) ->
+    case Decode#decode.array_start of
+        undefined -> value(Rest, Original, Skip + Len, [], [?ARRAY, OldAcc | Stack], Decode);
+        Fun -> value(Rest, Original, Skip + Len, Fun(OldAcc), [?ARRAY, OldAcc | Stack], Decode)
+    end.
+
+array_push(<<Byte, Rest/bits>>, Original, Skip, Acc, Stack, Decode, Value) when ?is_ws(Byte) ->
+    array_push(Rest, Original, Skip + 1, Acc, Stack, Decode, Value);
+array_push(<<"]", Rest/bits>>, Original, Skip, Acc0, Stack0, Decode, Value) ->
+    Acc =
+        case Decode#decode.array_push of
+            undefined -> [Value | Acc0];
+            Push -> Push(Value, Acc0)
+        end,
+    [_, OldAcc | Stack] = Stack0,
+    {ArrayValue, NewAcc} =
+        case Decode#decode.array_finish of
+            undefined -> {lists:reverse(Acc), OldAcc};
+            Finish -> Finish(Acc, OldAcc)
+        end,
+    continue(Rest, Original, Skip + 1, NewAcc, Stack, Decode, ArrayValue);
+array_push(<<$,, Rest/bits>>, Original, Skip0, Acc, Stack, Decode, Value) ->
+    Skip = Skip0 + 1,
+    case Decode#decode.array_push of
+        undefined -> value(Rest, Original, Skip, [Value | Acc], Stack, Decode);
+        Fun -> value(Rest, Original, Skip, Fun(Value, Acc), Stack, Decode)
+    end;
+array_push(_, Original, Skip, Acc, Stack, Decode, Value) ->
+    unexpected(Original, Skip, Acc, Stack, Decode, 0, 0, {?FUNCTION_NAME, Value}).
+
+object_start(<<Byte, Rest/bits>>, Original, Skip, Acc, Stack, Decode, Len) when ?is_ws(Byte) ->
+    object_start(Rest, Original, Skip, Acc, Stack, Decode, Len + 1);
+object_start(<<"}", Rest/bits>>, Original, Skip, Acc, Stack, Decode, Len) ->
+    {Value, NewAcc} =
+        case {Decode#decode.object_start, Decode#decode.object_finish} of
+            {undefined, undefined} -> {#{}, Acc};
+            {Start, undefined} -> {maps:from_list(Start(Acc)), Acc};
+            {undefined, Finish} -> Finish([], Acc);
+            {Start, Finish} -> Finish(Start(Acc), Acc)
+        end,
+    continue(Rest, Original, Skip + Len + 1, NewAcc, Stack, Decode, Value);
+object_start(<<$", Rest/bits>>, Original, Skip0, OldAcc, Stack0, Decode, Len) ->
+    Stack = [?OBJECT, OldAcc | Stack0],
+    Skip = Skip0 + Len + 1,
+    case Decode#decode.object_start of
+        undefined ->
+            string(Rest, Original, Skip, [], Stack, Decode);
+        Fun ->
+            Acc = Fun(OldAcc),
+            string(Rest, Original, Skip, Acc, Stack, Decode)
+    end;
+object_start(_, Original, Skip, Acc, Stack, Decode, Len) ->
+    unexpected(Original, Skip, Acc, Stack, Decode, Len, 0, value).
+
+object_value(<<Byte, Rest/bits>>, Original, Skip, Acc, Stack, Decode, Key) when ?is_ws(Byte) ->
+    object_value(Rest, Original, Skip + 1, Acc, Stack, Decode, Key);
+object_value(<<$:, Rest/bits>>, Original, Skip, Acc, Stack, Decode, Key) ->
+    value(Rest, Original, Skip + 1, Acc, [Key | Stack], Decode);
+object_value(_, Original, Skip, Acc, Stack, Decode, Key) ->
+    unexpected(Original, Skip, Acc, Stack, Decode, 0, 0, {?FUNCTION_NAME, Key}).
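+
+%% Callback sketch (illustrative): the array/object hooks let callers
+%% build custom containers, e.g. an object as a proplist in input order:
+%%
+%%   1> Dec = #{object_finish => fun(Acc, OldAcc) -> {lists:reverse(Acc), OldAcc} end},
+%%      json:decode(<<"{\"b\":1,\"a\":2}">>, ok, Dec).
+%%   {[{<<"b">>,1},{<<"a">>,2}],ok,<<>>}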
+
+object_push(<<Byte, Rest/bits>>, Original, Skip, Acc, Stack, Decode, Value, Key) when ?is_ws(Byte) ->
+    object_push(Rest, Original, Skip + 1, Acc, Stack, Decode, Value, Key);
+object_push(<<"}", Rest/bits>>, Original, Skip, Acc0, Stack0, Decode, Value, Key) ->
+    Acc =
+        case Decode#decode.object_push of
+            undefined -> [{Key, Value} | Acc0];
+            Fun -> Fun(Key, Value, Acc0)
+        end,
+    [_, OldAcc | Stack] = Stack0,
+    {ObjectValue, NewAcc} =
+        case Decode#decode.object_finish of
+            undefined -> {maps:from_list(Acc), OldAcc};
+            Finish -> Finish(Acc, OldAcc)
+        end,
+    continue(Rest, Original, Skip + 1, NewAcc, Stack, Decode, ObjectValue);
+object_push(<<$,, Rest/bits>>, Original, Skip, Acc0, Stack, Decode, Value, Key) ->
+    case Decode#decode.object_push of
+        undefined -> object_key(Rest, Original, Skip + 1, [{Key, Value} | Acc0], Stack, Decode);
+        Fun -> object_key(Rest, Original, Skip + 1, Fun(Key, Value, Acc0), Stack, Decode)
+    end;
+object_push(_, Original, Skip, Acc, Stack, Decode, Value, Key) ->
+    unexpected(Original, Skip, Acc, Stack, Decode, 0, 0, {?FUNCTION_NAME, Value, Key}).
+
+object_key(<<Byte, Rest/bits>>, Original, Skip, Acc, Stack, Decode) when ?is_ws(Byte) ->
+    object_key(Rest, Original, Skip + 1, Acc, Stack, Decode);
+object_key(<<$", Rest/bits>>, Original, Skip, Acc, Stack, Decode) ->
+    string(Rest, Original, Skip + 1, Acc, Stack, Decode);
+object_key(_, Original, Skip, Acc, Stack, Decode) ->
+    unexpected(Original, Skip, Acc, Stack, Decode, 0, 0, ?FUNCTION_NAME).
+
+continue(<<Rest/bits>>, Original, Skip, Acc, Stack0, Decode, Value) ->
+    case Stack0 of
+        [] -> terminate(Rest, Original, Skip, Acc, Value);
+        [?ARRAY | _] -> array_push(Rest, Original, Skip, Acc, Stack0, Decode, Value);
+        [?OBJECT | _] -> object_value(Rest, Original, Skip, Acc, Stack0, Decode, Value);
+        [Key | Stack] -> object_push(Rest, Original, Skip, Acc, Stack, Decode, Value, Key)
+    end.
+
+terminate(<<Byte, Rest/bits>>, Original, Skip, Acc, Value) when ?is_ws(Byte) ->
+    terminate(Rest, Original, Skip + 1, Acc, Value);
+terminate(<<Rest/bits>>, _Original, _Skip, Acc, Value) ->
+    {Value, Acc, Rest}.
+
+-spec unexpected_utf8(binary(), non_neg_integer()) -> no_return().
+unexpected_utf8(Original, Skip) when byte_size(Original) =:= Skip ->
+    error(unexpected_end);
+unexpected_utf8(Original, Skip) ->
+    invalid_byte(Original, Skip).
+
+unexpected(Original, Skip, Acc, Stack, Decode, Pos, Len, FuncData) ->
+    RequiredSize = Skip + Pos + Len,
+    OrigSize = byte_size(Original),
+    case OrigSize =< RequiredSize of
+        true ->
+            <<_:Skip/binary, Rest/binary>> = Original,
+            {continue, {Rest, Acc, Stack, Decode, FuncData}};
+        false ->
+            invalid_byte(Original, Skip + Pos)
+    end.
+
+-spec unexpected_sequence(binary(), non_neg_integer()) -> no_return().
+unexpected_sequence(Value, Skip) ->
+    error({unexpected_sequence, Value}, none, error_info(Skip)).
diff --git a/prelude/export_exe.bzl b/prelude/export_exe.bzl
index af37ff6b0dc..653d32bc639 100644
--- a/prelude/export_exe.bzl
+++ b/prelude/export_exe.bzl
@@ -9,9 +9,6 @@ def _export_exe_impl(ctx: AnalysisContext) -> list[Provider]:
     if ctx.attrs.src and ctx.attrs.exe:
         fail("Must supply one of src or exe to export_exe")
 
-    if not ctx.attrs.src and not ctx.attrs.exe:
-        fail("Must supply one of src or exe to export_exe")
-
    src = ctx.attrs.src if ctx.attrs.src else ctx.attrs.exe
 
     return [
@@ -21,7 +18,7 @@ def _export_exe_impl(ctx: AnalysisContext) -> list[Provider]:
         ),
     ]
 
-export_exe = rule(
+_export_exe = rule(
     doc = """Exports a file as an executable, for use in $(exe) macros or as a valid target for an exec_dep().
Accepts either a string `src`, which is a relative path to a file that will be directly referenced, or an arg `exe` which should be a path to an executable relative to a $(location) macro. @@ -45,7 +42,7 @@ export_exe = rule( src = "bin/script.sh", ) - The latter form allows executing checked in binaries with required resouces (eg. runtime shared libraries) + The latter form allows executing checked in binaries with required resources (eg. runtime shared libraries) without unnecessary indirection via another rule which allows args, like command_alias. Eg. instead of export_file( @@ -86,3 +83,24 @@ export_exe = rule( "src": attrs.option(attrs.source(), default = None, doc = "path to an executable binary relative to this package"), }, ) + +def export_exe(name, exe = None, src = None, **kwargs): + # If neither `exe` nor `src` is passed, treat the target's name as the src. + # + # export_exe( + # name = "script.sh", + # ) + # + # is equivalent to: + # + # export_exe( + # name = "script.sh", + # src = "script.sh", + # ) + # + _export_exe( + name = name, + exe = exe, + src = src if (exe or src) else name, + **kwargs + ) diff --git a/prelude/genrule.bzl b/prelude/genrule.bzl index a7963df8ff3..951ea1a1904 100644 --- a/prelude/genrule.bzl +++ b/prelude/genrule.bzl @@ -9,6 +9,7 @@ load("@prelude//:cache_mode.bzl", "CacheModeInfo") load("@prelude//:genrule_local_labels.bzl", "genrule_labels_require_local") +load("@prelude//:genrule_prefer_local_labels.bzl", "genrule_labels_prefer_local") load("@prelude//:genrule_toolchain.bzl", "GenruleToolchainInfo") load("@prelude//:is_full_meta_repo.bzl", "is_full_meta_repo") load("@prelude//android:build_only_native_code.bzl", "is_build_only_native_code") @@ -39,6 +40,9 @@ _BUILD_ROOT_LABELS = {label: True for label in [ "app_modules_genrule", # produces JSON containing file paths that are read from the root dir. "android_langpack_strings", # produces JSON containing file paths that are read from the root dir. "windows_long_path_issue", # Windows: relative path length exceeds PATH_MAX, program cannot access file + "flowtype_ota_safety_target", # produces JSON containing file paths that are project-relative + "ctrlr_setting_paths", + "llvm_buck_genrule", ]} # In Buck1 the SRCS environment variable is only set if the substring SRCS is on the command line. @@ -64,6 +68,9 @@ def _requires_build_root(ctx: AnalysisContext) -> bool: def _requires_local(ctx: AnalysisContext) -> bool: return genrule_labels_require_local(ctx.attrs.labels) +def _prefers_local(ctx: AnalysisContext) -> bool: + return genrule_labels_prefer_local(ctx.attrs.labels) + def _ignore_artifacts(ctx: AnalysisContext) -> bool: return "buck2_ignore_artifacts" in ctx.attrs.labels @@ -80,6 +87,7 @@ def genrule_attributes() -> dict[str, Attr]: "metadata_env_var": attrs.option(attrs.string(), default = None), "metadata_path": attrs.option(attrs.string(), default = None), "no_outputs_cleanup": attrs.bool(default = False), + "remote_execution_dependencies": attrs.list(attrs.dict(key = attrs.string(), value = attrs.string()), default = []), "_build_only_native_code": attrs.default_only(attrs.bool(default = is_build_only_native_code())), "_genrule_toolchain": attrs.default_only(attrs.toolchain_dep(default = "toolchains//:genrule", providers = [GenruleToolchainInfo])), } @@ -132,6 +140,7 @@ def process_genrule( fail("Only one of `out` and `outs` should be set. 
Got out=`%s`, outs=`%s`" % (repr(out_attr), repr(outs_attr))) local_only = _requires_local(ctx) + prefer_local = _prefers_local(ctx) # NOTE: Eventually we shouldn't require local_only here, since we should be # fine with caching local fallbacks if necessary (or maybe that should be @@ -173,18 +182,20 @@ def process_genrule( cmd = ctx.attrs.bash if ctx.attrs.bash != None else ctx.attrs.cmd if cmd == None: fail("One of `cmd` or `bash` should be set.") - cmd = cmd_args(cmd) + + replace_regex = [] # For backwards compatibility with Buck1. if is_windows: for re, sub in _WINDOWS_ENV_SUBSTITUTIONS: - cmd.replace_regex(re, sub) + replace_regex.append((re, sub)) for extra_env_var in extra_env_vars: - cmd.replace_regex(regex("\\$(%s\\b|\\{%s\\})" % (extra_env_var, extra_env_var)), "%%%s%%" % extra_env_var) + replace_regex.append( + (regex("\\$(%s\\b|\\{%s\\})" % (extra_env_var, extra_env_var)), "%%%s%%" % extra_env_var), + ) - if _ignore_artifacts(ctx): - cmd = cmd.ignore_artifacts() + cmd = cmd_args(cmd, ignore_artifacts = _ignore_artifacts(ctx), replace_regex = replace_regex) if type(ctx.attrs.srcs) == type([]): # FIXME: We should always use the short_path, but currently that is sometimes blank. @@ -227,6 +238,10 @@ def process_genrule( if local_only: env_vars["__BUCK2_LOCAL_ONLY_CACHE_BUSTER"] = cmd_args("") + # see comment above + if prefer_local: + env_vars["__BUCK2_PREFER_LOCAL_CACHE_BUSTER"] = cmd_args("") + # For now, when uploads are enabled, be safe and avoid sharing cache hits. cache_bust = _get_cache_mode(ctx).cache_bust_genrules @@ -283,16 +298,20 @@ def process_genrule( if is_windows: rewrite_scratch_path = cmd_args( - cmd_args(ctx.label.project_root).relative_to(srcs_artifact), + cmd_args(ctx.label.project_root, relative_to = srcs_artifact), format = 'set "BUCK_SCRATCH_PATH={}\\%BUCK_SCRATCH_PATH%"', ) else: srcs_dir = cmd_args(srcs_dir, quote = "shell") rewrite_scratch_path = cmd_args( - cmd_args(ctx.label.project_root, quote = "shell").relative_to(srcs_artifact), + cmd_args(ctx.label.project_root, quote = "shell", relative_to = srcs_artifact), format = "export BUCK_SCRATCH_PATH={}/$BUCK_SCRATCH_PATH", ) + # Relativize all paths in the command to the sandbox dir. + for script_cmd in script: + script_cmd.relative_to(srcs_artifact) + script = ( [ # Rewrite BUCK_SCRATCH_PATH @@ -300,12 +319,14 @@ def process_genrule( # Change to the directory that genrules expect. cmd_args(srcs_dir, format = "cd {}"), ] + - # Relativize all paths in the command to the sandbox dir. - [cmd.relative_to(srcs_artifact) for cmd in script] + script ) # Relative all paths in the env to the sandbox dir. - env_vars = {key: val.relative_to(srcs_artifact) for key, val in env_vars.items()} + env_vars = { + key: cmd_args(value, relative_to = srcs_artifact) + for key, value in env_vars.items() + } if is_windows: # Should be in the beginning. @@ -328,15 +349,19 @@ def process_genrule( metadata_args["metadata_env_var"] = ctx.attrs.metadata_env_var if ctx.attrs.metadata_path: metadata_args["metadata_path"] = ctx.attrs.metadata_path + if ctx.attrs.remote_execution_dependencies: + metadata_args["remote_execution_dependencies"] = ctx.attrs.remote_execution_dependencies category = "genrule" if ctx.attrs.type != None: # As of 09/2021, all genrule types were legal snake case if their dashes and periods were replaced with underscores. 
category += "_" + ctx.attrs.type.replace("-", "_").replace(".", "_") ctx.actions.run( - cmd_args(script_args).hidden([cmd, srcs_artifact, out_artifact.as_output()] + hidden), + cmd_args(script_args, hidden = [cmd, srcs_artifact, out_artifact.as_output()] + hidden), env = env_vars, local_only = local_only, + prefer_local = prefer_local, + weight = value_or(ctx.attrs.weight, 1), allow_cache_upload = cacheable, category = category, identifier = identifier, diff --git a/prelude/genrule_local_labels.bzl b/prelude/genrule_local_labels.bzl index bea36736f33..927cc5c83ec 100644 --- a/prelude/genrule_local_labels.bzl +++ b/prelude/genrule_local_labels.bzl @@ -94,6 +94,12 @@ _GENRULE_LOCAL_LABELS = {label: True for label in [ # (https://fb.workplace.com/groups/1042353022615812/posts/1849505965233843/). "uses_php", + # Uses the `libX11-devel` package which is not available on RE. + "uses_x11", + + # Unity license client needs to be set up on RE workers for this to work, and maybe further debugging. + "uses_unity", + # mksquashfs isn't available in RE, so run these locally # (https://fb.workplace.com/groups/buck2users/permalink/3023630007893360/) "uses_mksquashfs", @@ -211,6 +217,10 @@ _GENRULE_LOCAL_LABELS = {label: True for label in [ # locally to build on Windows. This is a mitigation until we can break down these # targets "zeratul_windows_capacity_hog", + + # The compilation databases produced by Buck have paths relative to the root of + # fbsource. This isn't compatible with RE. + "uses_compilation_database", ]} def genrule_labels_require_local(labels): diff --git a/prelude/genrule_prefer_local_labels.bzl b/prelude/genrule_prefer_local_labels.bzl new file mode 100644 index 00000000000..7b3229177e5 --- /dev/null +++ b/prelude/genrule_prefer_local_labels.bzl @@ -0,0 +1,22 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +""" +Handle labels used to make genrules prefer local execution +""" + +# Some rules prefer to be run locally for various reasons listed next to the label. +_GENRULE_PREFER_LOCAL_LABELS = {label: True for label in [ + # Used for rules that just copy large files and will be faster to do locally + "large_copy", +]} + +def genrule_labels_prefer_local(labels): + for label in labels: + if label in _GENRULE_PREFER_LOCAL_LABELS: + return True + return False diff --git a/prelude/git/tools/BUCK.v2 b/prelude/git/tools/BUCK.v2 index 9135477da01..ce7dcb83cc3 100644 --- a/prelude/git/tools/BUCK.v2 +++ b/prelude/git/tools/BUCK.v2 @@ -1,3 +1,9 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + prelude = native prelude.python_bootstrap_binary( diff --git a/prelude/go/cgo_builder.bzl b/prelude/go/cgo_builder.bzl new file mode 100644 index 00000000000..6619da057ee --- /dev/null +++ b/prelude/go/cgo_builder.bzl @@ -0,0 +1,182 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
+
+load("@prelude//:paths.bzl", "paths")
+load("@prelude//cxx:cxx_library.bzl", "cxx_compile_srcs")
+load(
+    "@prelude//cxx:cxx_sources.bzl",
+    "CxxSrcWithFlags",
+)
+load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo")
+load(
+    "@prelude//cxx:cxx_types.bzl",
+    "CxxRuleConstructorParams",  # @unused Used as a type
+)
+load("@prelude//cxx:headers.bzl", "cxx_attr_header_namespace", "cxx_get_regular_cxx_headers_layout", "prepare_headers")
+load(
+    "@prelude//cxx:preprocessor.bzl",
+    "CPreprocessor",
+    "CPreprocessorArgs",
+    "CPreprocessorInfo",
+    "cxx_inherited_preprocessor_infos",
+    "cxx_merge_cpreprocessors",
+)
+load(
+    "@prelude//linking:link_info.bzl",
+    "LinkStyle",
+)
+load("@prelude//linking:types.bzl", "Linkage")
+load("@prelude//os_lookup:defs.bzl", "OsLookup")
+load("@prelude//utils:cmd_script.bzl", "ScriptOs", "cmd_script")
+load("@prelude//utils:expect.bzl", "expect")
+load(":toolchain.bzl", "GoToolchainInfo", "get_toolchain_env_vars")
+
+# A map of expected linkages for provided link style
+_LINKAGE_FOR_LINK_STYLE = {
+    LinkStyle("static"): Linkage("static"),
+    LinkStyle("static_pic"): Linkage("static"),
+    LinkStyle("shared"): Linkage("shared"),
+}
+
+CGoToolOut = record(
+    cgo_gotypes = field(Artifact),  # _cgo_gotypes.go
+    cgo_export_h = field(Artifact),  # _cgo_export.h
+    cgo_export_c = field(Artifact),  # _cgo_export.c
+    cgo1_go_files = field(list[Artifact]),  # *.cgo1.go
+    cgo2_c_files = field(list[Artifact]),  # *.cgo2.c
+)
+
+def _cgo(
+        ctx: AnalysisContext,
+        srcs: list[Artifact],
+        own_pre: list[CPreprocessor],
+        inherited_pre: list[CPreprocessorInfo],
+        c_flags: list[str],
+        cpp_flags: list[str]) -> (CGoToolOut, Artifact):
+    """
+    Run `cgo` on `.go` sources to generate Go, C, and C-Header sources.
+    """
+    gen_dir = ctx.actions.declare_output("cgo_gen_tmp", dir = True)
+
+    # Invoke the cgo wrapper to generate the Go and C sources into `gen_dir`.
+    go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo]
+
+    cmd = cmd_args(
+        go_toolchain.cgo_wrapper,
+        cmd_args(go_toolchain.cgo, format = "--cgo={}"),
+        cmd_args(gen_dir.as_output(), format = "--output={}"),
+        "--",
+        c_flags + cpp_flags,
+        ctx.attrs.cxx_compiler_flags,
+        srcs,
+    )
+
+    env = get_toolchain_env_vars(go_toolchain)
+    env["CC"] = _cxx_wrapper(ctx, own_pre, inherited_pre)
+
+    ctx.actions.run(cmd, env = env, category = "cgo")
+
+    return project_go_and_c_files(srcs, gen_dir), gen_dir
+
+def project_go_and_c_files(cgo_srcs: list[Artifact], gen_dir: Artifact) -> CGoToolOut:
+    return CGoToolOut(
+        cgo_gotypes = gen_dir.project("_cgo_gotypes.go"),
+        cgo_export_h = gen_dir.project("_cgo_export.h"),
+        cgo_export_c = gen_dir.project("_cgo_export.c"),
+        cgo1_go_files = [gen_dir.project(paths.replace_extension(src.basename, ".cgo1.go")) for src in cgo_srcs],
+        cgo2_c_files = [gen_dir.project(paths.replace_extension(src.basename, ".cgo2.c")) for src in cgo_srcs],
+    )
+
+def _cxx_wrapper(ctx: AnalysisContext, own_pre: list[CPreprocessor], inherited_pre: list[CPreprocessorInfo]) -> cmd_args:
+    pre = cxx_merge_cpreprocessors(ctx, own_pre, inherited_pre)
+    pre_args = pre.set.project_as_args("args")
+    pre_include_dirs = pre.set.project_as_args("include_dirs")
+
+    go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo]
+    expect(CxxToolchainInfo in ctx.attrs._cxx_toolchain)
+    cxx_toolchain = ctx.attrs._cxx_toolchain[CxxToolchainInfo]
+
+    c_compiler = cxx_toolchain.c_compiler_info
+
+    # Construct the full C/C++ command needed to preprocess/compile sources.
+    cxx_cmd = cmd_args(
+        c_compiler.compiler,
+        c_compiler.preprocessor_flags,
+        c_compiler.compiler_flags,
+        pre_args,
+        pre_include_dirs,
+        go_toolchain.c_compiler_flags,
+    )
+
+    # Wrap the C/C++ command in a wrapper script to avoid arg length limits.
+    return cmd_script(
+        ctx = ctx,
+        name = "cxx_wrapper",
+        cmd = cxx_cmd,
+        os = ScriptOs("windows" if ctx.attrs._exec_os_type[OsLookup].platform == "windows" else "unix"),
+    )
+
+# Build a CPreprocessor the same way cxx_private_preprocessor_info does, but with our filtered headers
+def _own_pre(ctx: AnalysisContext, h_files: list[Artifact]) -> CPreprocessor:
+    namespace = cxx_attr_header_namespace(ctx)
+    header_map = {paths.join(namespace, h.short_path): h for h in h_files}
+    header_root = prepare_headers(ctx, header_map, "h_files-private-headers")
+
+    return CPreprocessor(
+        args = CPreprocessorArgs(args = ["-I", header_root.include_path] if header_root != None else []),
+    )
+
+def build_cgo(ctx: AnalysisContext, cgo_files: list[Artifact], h_files: list[Artifact], c_files: list[Artifact], c_flags: list[str], cpp_flags: list[str]) -> (list[Artifact], list[Artifact], Artifact):
+    if len(cgo_files) == 0:
+        return [], [], ctx.actions.copied_dir("cgo_gen_tmp", {})
+
+    # Gather preprocessor inputs.
+    own_pre = _own_pre(ctx, h_files)
+    inherited_pre = cxx_inherited_preprocessor_infos(ctx.attrs.deps)
+
+    # Generate the Go and C sources from the CGO sources.
+    cgo_tool_out, gen_dir = _cgo(ctx, cgo_files, [own_pre], inherited_pre, c_flags, cpp_flags)
+    go_gen_srcs = [cgo_tool_out.cgo_gotypes] + cgo_tool_out.cgo1_go_files
+    c_gen_headers = [cgo_tool_out.cgo_export_h]
+    c_gen_srcs = [cgo_tool_out.cgo_export_c] + cgo_tool_out.cgo2_c_files
+
+    # Wrap the generated CGO C headers in a CPreprocessor object for compiling.
+    cgo_headers_pre = CPreprocessor(args = CPreprocessorArgs(args = [
+        "-I",
+        prepare_headers(
+            ctx,
+            {h.basename: h for h in c_gen_headers},
+            "cgo-private-headers",
+        ).include_path,
+    ]))
+
+    link_style = ctx.attrs.link_style
+    if link_style == None:
+        link_style = "static"
+    linkage = _LINKAGE_FOR_LINK_STYLE[LinkStyle(link_style)]
+
+    # Compile C++ sources into object files.
+    c_compile_cmds = cxx_compile_srcs(
+        ctx,
+        CxxRuleConstructorParams(
+            rule_type = "cgo_sources",
+            headers_layout = cxx_get_regular_cxx_headers_layout(ctx),
+            srcs = [CxxSrcWithFlags(file = src) for src in c_files + c_gen_srcs],
+            compiler_flags = c_flags + ctx.attrs.cxx_compiler_flags,
+            preprocessor_flags = cpp_flags + ctx.attrs.cxx_preprocessor_flags,
+        ),
+        # Create private header tree and propagate via args.
+        [own_pre, cgo_headers_pre],
+        inherited_pre,
+        [],
+        linkage,
+        False,  # add_coverage_instrumentation_compiler_flags
+    )
+
+    compiled_objects = c_compile_cmds.pic.objects
+
+    return go_gen_srcs, compiled_objects, gen_dir
diff --git a/prelude/go/cgo_library.bzl b/prelude/go/cgo_library.bzl
deleted file mode 100644
index 0b2e3fc5178..00000000000
--- a/prelude/go/cgo_library.bzl
+++ /dev/null
@@ -1,271 +0,0 @@
-# Copyright (c) Meta Platforms, Inc. and affiliates.
-#
-# This source code is licensed under both the MIT license found in the
-# LICENSE-MIT file in the root directory of this source tree and the Apache
-# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
-# of this source tree.
- -load("@prelude//:paths.bzl", "paths") -load( - "@prelude//apple:xcode.bzl", - "get_project_root_file", -) -load( - "@prelude//cxx:compile.bzl", - "CxxSrcWithFlags", # @unused Used as a type -) -load("@prelude//cxx:cxx_library.bzl", "cxx_compile_srcs") -load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo") -load( - "@prelude//cxx:cxx_types.bzl", - "CxxRuleConstructorParams", # @unused Used as a type -) -load("@prelude//cxx:headers.bzl", "cxx_get_regular_cxx_headers_layout", "prepare_headers") -load( - "@prelude//cxx:preprocessor.bzl", - "CPreprocessor", - "CPreprocessorArgs", - "CPreprocessorInfo", - "cxx_inherited_preprocessor_infos", - "cxx_merge_cpreprocessors", - "cxx_private_preprocessor_info", -) -load( - "@prelude//linking:link_info.bzl", - "LinkStyle", - "Linkage", - "MergedLinkInfo", - "create_merged_link_info_for_propagation", -) -load( - "@prelude//linking:shared_libraries.bzl", - "SharedLibraryInfo", - "merge_shared_libraries", -) -load("@prelude//os_lookup:defs.bzl", "OsLookup") -load("@prelude//utils:expect.bzl", "expect") -load( - "@prelude//utils:utils.bzl", - "map_idx", -) -load(":compile.bzl", "GoPkgCompileInfo", "compile", "get_filtered_srcs", "get_inherited_compile_pkgs") -load(":coverage.bzl", "GoCoverageMode") -load(":link.bzl", "GoPkgLinkInfo", "get_inherited_link_pkgs") -load(":packages.bzl", "GoPkg", "go_attr_pkg_name", "merge_pkgs") -load(":toolchain.bzl", "GoToolchainInfo", "get_toolchain_cmd_args") - -# A map of expected linkages for provided link style -_LINKAGE_FOR_LINK_STYLE = { - LinkStyle("static"): Linkage("static"), - LinkStyle("static_pic"): Linkage("static"), - LinkStyle("shared"): Linkage("shared"), -} - -def _cgo( - ctx: AnalysisContext, - srcs: list[Artifact], - own_pre: list[CPreprocessor], - inherited_pre: list[CPreprocessorInfo]) -> (list[Artifact], list[Artifact], list[Artifact]): - """ - Run `cgo` on `.go` sources to generate Go, C, and C-Header sources. - """ - - pre = cxx_merge_cpreprocessors(ctx, own_pre, inherited_pre) - pre_args = pre.set.project_as_args("args") - pre_include_dirs = pre.set.project_as_args("include_dirs") - - # If you change this dir or naming convention, please - # update the corresponding logic in `fbgolist`. - # Otherwise editing and linting for Go will break. - gen_dir = "cgo_gen" - - go_srcs = [] - c_headers = [] - c_srcs = [] - go_srcs.append(ctx.actions.declare_output(paths.join(gen_dir, "_cgo_gotypes.go"))) - c_srcs.append(ctx.actions.declare_output(paths.join(gen_dir, "_cgo_export.c"))) - c_headers.append(ctx.actions.declare_output(paths.join(gen_dir, "_cgo_export.h"))) - for src in srcs: - go_srcs.append(ctx.actions.declare_output(paths.join(gen_dir, paths.replace_extension(src.basename, ".cgo1.go")))) - c_srcs.append(ctx.actions.declare_output(paths.join(gen_dir, paths.replace_extension(src.basename, ".cgo2.c")))) - - # Return a `cmd_args` to use as the generated sources. - go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] - expect(go_toolchain.cgo != None) - expect(CxxToolchainInfo in ctx.attrs._cxx_toolchain) - cxx_toolchain = ctx.attrs._cxx_toolchain[CxxToolchainInfo] - - cmd = get_toolchain_cmd_args(go_toolchain, go_root = False) - cmd.add(go_toolchain.cgo_wrapper) - - args = cmd_args() - args.add(cmd_args(go_toolchain.cgo, format = "--cgo={}")) - - c_compiler = cxx_toolchain.c_compiler_info - # linker = cxx_toolchain.linker_info - - # Passing fbcode-platform ldflags may create S365277, so I would - # comment this change until we really need to do it. 
- # ldflags = cmd_args( - # linker.linker_flags, - # go_toolchain.external_linker_flags, - # ) - - # Construct the full C/C++ command needed to preprocess/compile sources. - cxx_cmd = cmd_args() - cxx_cmd.add(c_compiler.compiler) - cxx_cmd.add(c_compiler.preprocessor_flags) - cxx_cmd.add(c_compiler.compiler_flags) - cxx_cmd.add(pre_args) - cxx_cmd.add(pre_include_dirs) - cxx_cmd.add(go_toolchain.c_compiler_flags) - - # Wrap the C/C++ command in a wrapper script to avoid arg length limits. - is_win = ctx.attrs._exec_os_type[OsLookup].platform == "windows" - cxx_sh = cmd_args( - [ - cmd_args(cxx_cmd, quote = "shell"), - "%*" if is_win else "\"$@\"", - ], - delimiter = " ", - ) - cxx_wrapper, _ = ctx.actions.write( - "__{}_cxx__.{}".format(ctx.label.name, "bat" if is_win else "sh"), - ([] if is_win else ["#!/bin/sh"]) + [cxx_sh], - allow_args = True, - is_executable = True, - ) - args.add(cmd_args(cxx_wrapper, format = "--env-cc={}")) - args.hidden(cxx_cmd) - - # TODO(agallagher): cgo outputs a dir with generated sources, but I'm not - # sure how to pass in an output dir *and* enumerate the sources we know will - # generated w/o v2 complaining that the output dir conflicts with the nested - # artifacts. - args.add(cmd_args(go_srcs[0].as_output(), format = "--output={}/..")) - - args.add(srcs) - - argsfile = ctx.actions.declare_output(paths.join(gen_dir, ".cgo.argsfile")) - ctx.actions.write(argsfile.as_output(), args, allow_args = True) - - cmd.add(cmd_args(argsfile, format = "@{}").hidden([args])) - - for src in go_srcs + c_headers + c_srcs: - cmd.hidden(src.as_output()) - ctx.actions.run(cmd, category = "cgo") - - return go_srcs, c_headers, c_srcs - -def cgo_library_impl(ctx: AnalysisContext) -> list[Provider]: - pkg_name = go_attr_pkg_name(ctx) - - project_root_file = get_project_root_file(ctx) - - # Gather preprocessor inputs. - (own_pre, _) = cxx_private_preprocessor_info( - ctx, - cxx_get_regular_cxx_headers_layout(ctx), - project_root_file = project_root_file, - ) - inherited_pre = cxx_inherited_preprocessor_infos(ctx.attrs.deps) - - # Separate sources into C++ and CGO sources. - cgo_srcs = [] - cxx_srcs = [] - for src in ctx.attrs.srcs: - if src.extension == ".go": - cgo_srcs.append(src) - elif src.extension in (".c", ".cpp"): - cxx_srcs.append(src) - else: - fail("unexpected extension: {}".format(src)) - - # Generate CGO and C sources. - go_srcs, c_headers, c_srcs = _cgo(ctx, cgo_srcs, [own_pre], inherited_pre) - cxx_srcs.extend(c_srcs) - - # Wrap the generated CGO C headers in a CPreprocessor object for compiling. - cgo_headers_pre = CPreprocessor(relative_args = CPreprocessorArgs(args = [ - "-I", - prepare_headers( - ctx, - {h.basename: h for h in c_headers}, - "cgo-private-headers", - None, - ).include_path, - ])) - - link_style = ctx.attrs.link_style - if link_style == None: - link_style = "static" - linkage = _LINKAGE_FOR_LINK_STYLE[LinkStyle(link_style)] - - # Copmile C++ sources into object files. - c_compile_cmds = cxx_compile_srcs( - ctx, - CxxRuleConstructorParams( - rule_type = "cgo_library", - headers_layout = cxx_get_regular_cxx_headers_layout(ctx), - srcs = [CxxSrcWithFlags(file = src) for src in cxx_srcs], - ), - # Create private header tree and propagate via args. - [own_pre, cgo_headers_pre], - inherited_pre, - [], - linkage, - ) - - compiled_objects = c_compile_cmds.pic.objects - - # Merge all sources together to pass to the Go compile step. 
- all_srcs = cmd_args(go_srcs + compiled_objects) - if ctx.attrs.go_srcs: - all_srcs.add(get_filtered_srcs(ctx, ctx.attrs.go_srcs)) - - shared = ctx.attrs._compile_shared - race = ctx.attrs._race - coverage_mode = GoCoverageMode(ctx.attrs._coverage_mode) if ctx.attrs._coverage_mode else None - - # Build Go library. - compiled_pkg = compile( - ctx, - pkg_name, - all_srcs, - deps = ctx.attrs.deps + ctx.attrs.exported_deps, - shared = shared, - race = race, - coverage_mode = coverage_mode, - ) - - # Temporarily hack, it seems like we can update record, so create new one - compiled_pkg = GoPkg( - cgo = True, - pkg = compiled_pkg.pkg, - coverage_vars = compiled_pkg.coverage_vars, - ) - - pkgs = { - pkg_name: compiled_pkg, - } - - # We need to keep pre-processed cgo source files, - # because they are required for any editing and linting (like VSCode+gopls) - # to work with cgo. And when nearly every FB service client is cgo, - # we need to support it well. - return [ - DefaultInfo(default_output = compiled_pkg.pkg, other_outputs = go_srcs), - GoPkgCompileInfo(pkgs = merge_pkgs([ - pkgs, - get_inherited_compile_pkgs(ctx.attrs.exported_deps), - ])), - GoPkgLinkInfo(pkgs = merge_pkgs([ - pkgs, - get_inherited_link_pkgs(ctx.attrs.deps + ctx.attrs.exported_deps), - ])), - create_merged_link_info_for_propagation(ctx, filter(None, [d.get(MergedLinkInfo) for d in ctx.attrs.deps])), - merge_shared_libraries( - ctx.actions, - deps = filter(None, map_idx(SharedLibraryInfo, ctx.attrs.deps)), - ), - ] diff --git a/prelude/go/compile.bzl b/prelude/go/compile.bzl index 86f7f71333c..e3038756c0e 100644 --- a/prelude/go/compile.bzl +++ b/prelude/go/compile.bzl @@ -6,19 +6,11 @@ # of this source tree. load("@prelude//:paths.bzl", "paths") - -# @unused this comment is to make the linter happy. The linter thinks -# GoCoverageMode is unused despite it being used in the function signature of -# multiple functions. -load(":coverage.bzl", "GoCoverageMode", "cover_srcs") load( ":packages.bzl", "GoPkg", # @Unused used as type - "make_importcfg", "merge_pkgs", - "pkg_artifacts", ) -load(":toolchain.bzl", "GoToolchainInfo", "get_toolchain_cmd_args") # Provider wrapping packages used for compiling. GoPkgCompileInfo = provider(fields = { @@ -39,131 +31,15 @@ GoTestInfo = provider( def get_inherited_compile_pkgs(deps: list[Dependency]) -> dict[str, GoPkg]: return merge_pkgs([d[GoPkgCompileInfo].pkgs for d in deps if GoPkgCompileInfo in d]) -def get_filtered_srcs(ctx: AnalysisContext, srcs: list[Artifact], tests: bool = False, force_disable_cgo: bool = False) -> cmd_args: - """ - Filter the input sources based on build pragma - """ - - go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] - - # Delegate to `go list` to filter out srcs with incompatible `// +build` - # pragmas. - filtered_srcs = ctx.actions.declare_output("__filtered_srcs__.txt") - srcs_dir = ctx.actions.symlinked_dir( - "__srcs__", - {src.short_path: src for src in srcs}, - ) - filter_cmd = get_toolchain_cmd_args(go_toolchain, go_root = True, force_disable_cgo = force_disable_cgo) - filter_cmd.add(go_toolchain.filter_srcs) - filter_cmd.add(cmd_args(go_toolchain.go, format = "--go={}")) - if tests: - filter_cmd.add("--tests") - filter_cmd.add(cmd_args(",".join(go_toolchain.tags + ctx.attrs._tags), format = "--tags={}")) - filter_cmd.add(cmd_args(filtered_srcs.as_output(), format = "--output={}")) - filter_cmd.add(srcs_dir) - ctx.actions.run(filter_cmd, category = "go_filter_srcs") - - # Add filtered srcs to compile command. 
- return cmd_args(filtered_srcs, format = "@{}").hidden(srcs).hidden(srcs_dir) - -def _assemble_cmd( - ctx: AnalysisContext, - pkg_name: str, - flags: list[str] = [], - shared: bool = False) -> cmd_args: - go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] - cmd = cmd_args() - cmd.add(go_toolchain.assembler) - cmd.add(go_toolchain.assembler_flags) - cmd.add(flags) - cmd.add("-p", pkg_name) - if shared: - cmd.add("-shared") - - return cmd - -def _compile_cmd( - ctx: AnalysisContext, - pkg_name: str, - pkgs: dict[str, Artifact] = {}, - deps: list[Dependency] = [], - flags: list[str] = [], - shared: bool = False, - race: bool = False) -> cmd_args: - go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] - - cmd = cmd_args() - cmd.add(go_toolchain.compiler) - cmd.add(go_toolchain.compiler_flags) - cmd.add("-p", pkg_name) - cmd.add("-pack") - cmd.add("-nolocalimports") - cmd.add(flags) - cmd.add("-buildid=") - - # Add shared/static flags. - if shared: - cmd.add("-shared") - - if race: - cmd.add("-race") - - # Add Go pkgs inherited from deps to compiler search path. - all_pkgs = merge_pkgs([ - pkgs, - pkg_artifacts(get_inherited_compile_pkgs(deps)), - ]) - - importcfg = make_importcfg(ctx, pkg_name, all_pkgs, with_importmap = True) - - cmd.add("-importcfg", importcfg) - - return cmd - -def compile( - ctx: AnalysisContext, - pkg_name: str, - srcs: cmd_args, - pkgs: dict[str, Artifact] = {}, - deps: list[Dependency] = [], - compile_flags: list[str] = [], - assemble_flags: list[str] = [], - shared: bool = False, - race: bool = False, - coverage_mode: GoCoverageMode | None = None) -> GoPkg: - go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] - output = ctx.actions.declare_output(paths.basename(pkg_name) + ".a") - - cmd = get_toolchain_cmd_args(go_toolchain) - cmd.add(go_toolchain.compile_wrapper) - cmd.add(cmd_args(output.as_output(), format = "--output={}")) - cmd.add(cmd_args(_compile_cmd(ctx, pkg_name, pkgs, deps, compile_flags, shared = shared, race = race), format = "--compiler={}")) - cmd.add(cmd_args(_assemble_cmd(ctx, pkg_name, assemble_flags, shared = shared), format = "--assembler={}")) - cmd.add(cmd_args(go_toolchain.packer, format = "--packer={}")) - if ctx.attrs.embedcfg: - cmd.add(cmd_args(ctx.attrs.embedcfg, format = "--embedcfg={}")) - - argsfile = ctx.actions.declare_output(pkg_name + ".go.argsfile") - - coverage_vars = None - if coverage_mode != None: - if race and coverage_mode != GoCoverageMode("atomic"): - fail("`coverage_mode` must be `atomic` when `race=True`") - cov_res = cover_srcs(ctx, pkg_name, coverage_mode, srcs, shared) - srcs = cov_res.srcs - coverage_vars = cov_res.variables - - srcs_args = cmd_args(srcs) - ctx.actions.write(argsfile.as_output(), srcs_args, allow_args = True) - - cmd.add(cmd_args(argsfile, format = "@{}").hidden([srcs_args])) - - identifier = paths.basename(pkg_name) - if shared: - identifier += "[shared]" - if coverage_mode: - identifier += "[coverage_" + coverage_mode.value + "]" - - ctx.actions.run(cmd, category = "go_compile", identifier = identifier) - - return GoPkg(pkg = output, coverage_vars = coverage_vars) +def infer_package_root(srcs: list[Artifact]) -> str: + go_sources = [s for s in srcs if s.extension == ".go"] + if len(go_sources) == 0: + return "" + dir_set = {paths.dirname(s.short_path): None for s in go_sources} + if len(dir_set) > 1: + fail("Provide `package_root` target attribute. Can't infer it when there are multiple directories containing .go files: {}. 
Sources: {}".format( + dir_set.keys(), + [s.short_path for s in go_sources], + )) + + return dir_set.keys()[0] diff --git a/prelude/go/constraints/BUCK.v2 b/prelude/go/constraints/BUCK.v2 index e19d482430f..2eea56458ce 100644 --- a/prelude/go/constraints/BUCK.v2 +++ b/prelude/go/constraints/BUCK.v2 @@ -1,16 +1,14 @@ -load(":defs.bzl", "generate_tag_constraints") +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() constraint_setting( name = "cgo_enabled", visibility = ["PUBLIC"], ) -constraint_value( - name = "cgo_enabled_auto", - constraint_setting = ":cgo_enabled", - visibility = ["PUBLIC"], -) - constraint_value( name = "cgo_enabled_true", constraint_setting = ":cgo_enabled", @@ -57,6 +55,23 @@ constraint_value( visibility = ["PUBLIC"], ) +constraint_setting( + name = "asan", + visibility = ["PUBLIC"], +) + +constraint_value( + name = "asan_false", + constraint_setting = ":race", + visibility = ["PUBLIC"], +) + +constraint_value( + name = "asan_true", + constraint_setting = ":race", + visibility = ["PUBLIC"], +) + constraint_setting( name = "coverage_mode", visibility = ["PUBLIC"], @@ -79,5 +94,3 @@ constraint_value( constraint_setting = ":coverage_mode", visibility = ["PUBLIC"], ) - -generate_tag_constraints() diff --git a/prelude/go/coverage.bzl b/prelude/go/coverage.bzl index 872ee437d5c..cf6ab2808d6 100644 --- a/prelude/go/coverage.bzl +++ b/prelude/go/coverage.bzl @@ -44,6 +44,6 @@ def cover_srcs(ctx: AnalysisContext, pkg_name: str, mode: GoCoverageMode, srcs: ctx.actions.run(cmd, category = "go_cover", identifier = path) return GoCoverResult( - srcs = cmd_args(out_srcs_argsfile, format = "@{}").hidden(out_covered_src_dir).hidden(srcs), + srcs = cmd_args(out_srcs_argsfile, format = "@{}", hidden = [out_covered_src_dir, srcs]), variables = cmd_args(out_coverage_vars_argsfile, format = "@{}"), ) diff --git a/prelude/go/go_binary.bzl b/prelude/go/go_binary.bzl index f5ee6c7ac15..06309180998 100644 --- a/prelude/go/go_binary.bzl +++ b/prelude/go/go_binary.bzl @@ -16,26 +16,36 @@ load( "map_val", "value_or", ) -load(":compile.bzl", "compile", "get_filtered_srcs") load(":link.bzl", "link") +load(":package_builder.bzl", "build_package") +load(":toolchain.bzl", "GoToolchainInfo", "evaluate_cgo_enabled") def go_binary_impl(ctx: AnalysisContext) -> list[Provider]: - lib = compile( + go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] + + lib, pkg_info = build_package( ctx, "main", - get_filtered_srcs(ctx, ctx.attrs.srcs), + ctx.attrs.srcs, + package_root = ctx.attrs.package_root, deps = ctx.attrs.deps, - compile_flags = ctx.attrs.compiler_flags, + compiler_flags = ctx.attrs.compiler_flags, + tags = ctx.attrs._tags, race = ctx.attrs._race, + asan = ctx.attrs._asan, + embedcfg = ctx.attrs.embedcfg, + cgo_enabled = evaluate_cgo_enabled(go_toolchain, ctx.attrs.cgo_enabled), ) (bin, runtime_files, external_debug_info) = link( ctx, - lib.pkg, + lib, deps = ctx.attrs.deps, link_style = value_or(map_val(LinkStyle, ctx.attrs.link_style), LinkStyle("static")), linker_flags = ctx.attrs.linker_flags, link_mode = ctx.attrs.link_mode, race = ctx.attrs._race, + asan = ctx.attrs._asan, + external_linker_flags = ctx.attrs.external_linker_flags, ) # runtime_files are all the artifacts that must be present in order for this @@ -62,6 +72,7 @@ def go_binary_impl(ctx: AnalysisContext) -> list[Provider]: default_output = bin, other_outputs = other_outputs, ), - RunInfo(args = cmd_args(bin).hidden(other_outputs)), + RunInfo(args = cmd_args(bin, hidden = 
other_outputs)), DistInfo(nondebug_runtime_files = runtime_files), + pkg_info, ] diff --git a/prelude/go/go_exported_library.bzl b/prelude/go/go_exported_library.bzl index 5210f174224..3c2721fed3b 100644 --- a/prelude/go/go_exported_library.bzl +++ b/prelude/go/go_exported_library.bzl @@ -5,42 +5,154 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo") +load("@prelude//cxx:linker.bzl", "get_default_shared_library_name") +load( + "@prelude//cxx:preprocessor.bzl", + "cxx_inherited_preprocessor_infos", + "cxx_merge_cpreprocessors", +) +load( + "@prelude//linking:link_groups.bzl", + "merge_link_group_lib_info", +) load( + "@prelude//linking:link_info.bzl", + "Archive", + "ArchiveLinkable", + "LibOutputStyle", + "LinkInfo", + "LinkInfos", "LinkStyle", + "LinkedObject", + "MergedLinkInfo", # @unused Used as a type + "SharedLibLinkable", + "create_merged_link_info", +) +load( + "@prelude//linking:linkable_graph.bzl", + "create_linkable_graph", + "create_linkable_graph_node", + "create_linkable_node", +) +load( + "@prelude//linking:shared_libraries.bzl", + "SharedLibraries", + "SharedLibraryInfo", + "create_shlib", + "merge_shared_libraries", ) load( "@prelude//utils:utils.bzl", + "map_idx", "map_val", "value_or", ) -load(":compile.bzl", "compile", "get_filtered_srcs") load(":link.bzl", "GoBuildMode", "link") +load(":package_builder.bzl", "build_package") +load(":packages.bzl", "cgo_exported_preprocessor") +load(":toolchain.bzl", "GoToolchainInfo", "evaluate_cgo_enabled") def go_exported_library_impl(ctx: AnalysisContext) -> list[Provider]: - lib = compile( + go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] + lib, pkg_info = build_package( ctx, "main", - get_filtered_srcs(ctx, ctx.attrs.srcs), + ctx.attrs.srcs, + package_root = ctx.attrs.package_root, deps = ctx.attrs.deps, - compile_flags = ctx.attrs.compiler_flags, - shared = True, - race = ctx.attrs._race, - ) - (bin, runtime_files, _external_debug_info) = link( - ctx, - lib.pkg, - deps = ctx.attrs.deps, - build_mode = GoBuildMode(ctx.attrs.build_mode), - link_style = value_or(map_val(LinkStyle, ctx.attrs.link_style), LinkStyle("static_pic")), - linker_flags = ctx.attrs.linker_flags, - external_linker_flags = ctx.attrs.external_linker_flags, - shared = True, + compiler_flags = ctx.attrs.compiler_flags, + tags = ctx.attrs._tags, race = ctx.attrs._race, + asan = ctx.attrs._asan, + embedcfg = ctx.attrs.embedcfg, + cgo_enabled = evaluate_cgo_enabled(go_toolchain, ctx.attrs.cgo_enabled), ) + + def link_variant(build_mode: GoBuildMode): + (exp_lib, _, _) = link( + ctx, + lib, + deps = ctx.attrs.deps, + build_mode = build_mode, + link_style = value_or(map_val(LinkStyle, ctx.attrs.link_style), LinkStyle("static_pic")), + linker_flags = ctx.attrs.linker_flags, + external_linker_flags = ctx.attrs.external_linker_flags, + race = ctx.attrs._race, + asan = ctx.attrs._asan, + ) + return exp_lib + + c_archive = link_variant(GoBuildMode("c_archive")) # .a - PIC archive + c_shared = link_variant(GoBuildMode("c_shared")) # .so - PIC shared library + + cxx_toolchain = ctx.attrs._cxx_toolchain[CxxToolchainInfo] + + soname = get_default_shared_library_name(cxx_toolchain.linker_info, ctx.label) + + link_infos = { + LibOutputStyle("archive"): LinkInfos( + default = LinkInfo(linkables = [ArchiveLinkable( + archive = Archive(artifact = c_archive), + linker_type = cxx_toolchain.linker_info.type, + )]), + ), + LibOutputStyle("pic_archive"): LinkInfos(
default = LinkInfo(linkables = [ArchiveLinkable( + archive = Archive(artifact = c_archive), + linker_type = cxx_toolchain.linker_info.type, + )]), + ), + LibOutputStyle("shared_lib"): LinkInfos( + default = LinkInfo(linkables = [SharedLibLinkable( + lib = c_shared, + )]), + ), + } + + shared_libs = SharedLibraries(libraries = [ + create_shlib( + soname = soname, + label = ctx.label, + lib = LinkedObject( + output = c_shared, + unstripped_output = c_shared, + ), + ), + ]) + + own_exported_preprocessors = [cgo_exported_preprocessor(ctx, pkg_info)] if ctx.attrs.generate_exported_header else [] + return [ DefaultInfo( - default_output = bin, - other_outputs = runtime_files, + default_output = c_archive if ctx.attrs.build_mode == "c_archive" else c_shared, + ), + create_merged_link_info( + ctx, + cxx_toolchain.pic_behavior, + link_infos = link_infos, + deps = filter(None, map_idx(MergedLinkInfo, ctx.attrs.deps)), + ), + merge_shared_libraries( + ctx.actions, + node = shared_libs, + deps = filter(None, map_idx(SharedLibraryInfo, ctx.attrs.deps)), + ), + merge_link_group_lib_info(deps = ctx.attrs.deps), + create_linkable_graph( + ctx, + node = create_linkable_graph_node( + ctx, + linkable_node = create_linkable_node( + ctx, + default_soname = soname, + deps = ctx.attrs.deps, + link_infos = link_infos, + shared_libs = shared_libs, + ), + ), + deps = ctx.attrs.deps, ), + cxx_merge_cpreprocessors(ctx, own_exported_preprocessors, cxx_inherited_preprocessor_infos(ctx.attrs.deps)), + pkg_info, ] diff --git a/prelude/go/go_library.bzl b/prelude/go/go_library.bzl index 00309adaffb..9a1f051df56 100644 --- a/prelude/go/go_library.bzl +++ b/prelude/go/go_library.bzl @@ -5,15 +5,24 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +load( + "@prelude//cxx:preprocessor.bzl", + "cxx_inherited_preprocessor_infos", + "cxx_merge_cpreprocessors", +) load( "@prelude//linking:link_groups.bzl", - "LinkGroupLibInfo", + "merge_link_group_lib_info", ) load( "@prelude//linking:link_info.bzl", "MergedLinkInfo", "create_merged_link_info_for_propagation", ) +load( + "@prelude//linking:linkable_graph.bzl", + "create_linkable_graph", +) load( "@prelude//linking:shared_libraries.bzl", "SharedLibraryInfo", @@ -23,49 +32,50 @@ load( "@prelude//utils:utils.bzl", "map_idx", ) -load(":compile.bzl", "GoPkgCompileInfo", "GoTestInfo", "compile", "get_filtered_srcs", "get_inherited_compile_pkgs") +load(":compile.bzl", "GoPkgCompileInfo", "GoTestInfo") load(":coverage.bzl", "GoCoverageMode") load(":link.bzl", "GoPkgLinkInfo", "get_inherited_link_pkgs") -load(":packages.bzl", "go_attr_pkg_name", "merge_pkgs") +load(":package_builder.bzl", "build_package") +load(":packages.bzl", "cgo_exported_preprocessor", "go_attr_pkg_name", "merge_pkgs") +load(":toolchain.bzl", "GoToolchainInfo", "evaluate_cgo_enabled") def go_library_impl(ctx: AnalysisContext) -> list[Provider]: + go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] + pkgs = {} - default_output = None - pkg_name = None - if ctx.attrs.srcs: - pkg_name = go_attr_pkg_name(ctx) + pkg_name = go_attr_pkg_name(ctx) - # We need to set CGO_DESABLED for "pure" Go libraries, otherwise CGo files may be selected for compilation. 
- srcs = get_filtered_srcs(ctx, ctx.attrs.srcs, force_disable_cgo = True) - shared = ctx.attrs._compile_shared - race = ctx.attrs._race - coverage_mode = GoCoverageMode(ctx.attrs._coverage_mode) if ctx.attrs._coverage_mode else None + race = ctx.attrs._race + asan = ctx.attrs._asan + coverage_mode = GoCoverageMode(ctx.attrs._coverage_mode) if ctx.attrs._coverage_mode else None - pkg = compile( - ctx, - pkg_name, - srcs = srcs, - deps = ctx.attrs.deps + ctx.attrs.exported_deps, - compile_flags = ctx.attrs.compiler_flags, - assemble_flags = ctx.attrs.assembler_flags, - shared = shared, - race = race, - coverage_mode = coverage_mode, - ) + pkg, pkg_info = build_package( + ctx, + pkg_name, + srcs = ctx.attrs.srcs + ctx.attrs.headers, + package_root = ctx.attrs.package_root, + deps = ctx.attrs.deps, + compiler_flags = ctx.attrs.compiler_flags, + assembler_flags = ctx.attrs.assembler_flags, + tags = ctx.attrs._tags, + race = race, + asan = asan, + coverage_mode = coverage_mode, + embedcfg = ctx.attrs.embedcfg, + cgo_enabled = evaluate_cgo_enabled(go_toolchain, ctx.attrs._cgo_enabled, ctx.attrs.override_cgo_enabled), + ) - default_output = pkg.pkg - pkgs[pkg_name] = pkg + default_output = pkg.pkg + pkgs[pkg_name] = pkg + + own_exported_preprocessors = [cgo_exported_preprocessor(ctx, pkg_info)] if ctx.attrs.generate_exported_header else [] return [ DefaultInfo(default_output = default_output), - LinkGroupLibInfo(libs = {}), - GoPkgCompileInfo(pkgs = merge_pkgs([ - pkgs, - get_inherited_compile_pkgs(ctx.attrs.exported_deps), - ])), + GoPkgCompileInfo(pkgs = pkgs), GoPkgLinkInfo(pkgs = merge_pkgs([ pkgs, - get_inherited_link_pkgs(ctx.attrs.deps + ctx.attrs.exported_deps), + get_inherited_link_pkgs(ctx.attrs.deps), ])), GoTestInfo( deps = ctx.attrs.deps, @@ -77,4 +87,11 @@ def go_library_impl(ctx: AnalysisContext) -> list[Provider]: ctx.actions, deps = filter(None, map_idx(SharedLibraryInfo, ctx.attrs.deps)), ), + merge_link_group_lib_info(deps = ctx.attrs.deps), + create_linkable_graph( + ctx, + deps = ctx.attrs.deps, + ), + cxx_merge_cpreprocessors(ctx, own_exported_preprocessors, cxx_inherited_preprocessor_infos(ctx.attrs.deps)), + pkg_info, ] diff --git a/prelude/go/go_list.bzl b/prelude/go/go_list.bzl new file mode 100644 index 00000000000..37bafdb6f33 --- /dev/null +++ b/prelude/go/go_list.bzl @@ -0,0 +1,132 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
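+ +# This module shells out to `go list -e -json=<fields>` in a copied source tree and parses the JSON output to classify sources. +# A trimmed, illustrative example of that JSON (names and values are placeholders): +# { +# "Name": "mypkg", +# "GoFiles": ["a.go"], +# "CgoFiles": ["b.go"], +# "Imports": ["fmt"] +# }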
+ +load("@prelude//:paths.bzl", "paths") +load(":toolchain.bzl", "GoToolchainInfo", "get_toolchain_env_vars") + +GoListOut = record( + name = field(str), + imports = field(list[str], default = []), + test_imports = field(list[str], default = []), + x_test_imports = field(list[str], default = []), + go_files = field(list[Artifact], default = []), + h_files = field(list[Artifact], default = []), + c_files = field(list[Artifact], default = []), + cxx_files = field(list[Artifact], default = []), + cgo_files = field(list[Artifact], default = []), + s_files = field(list[Artifact], default = []), + test_go_files = field(list[Artifact], default = []), + x_test_go_files = field(list[Artifact], default = []), + ignored_go_files = field(list[Artifact], default = []), + ignored_other_files = field(list[Artifact], default = []), + embed_files = field(list[Artifact], default = []), + cgo_cflags = field(list[str], default = []), + cgo_cppflags = field(list[str], default = []), +) + +def go_list(ctx: AnalysisContext, pkg_name: str, srcs: list[Artifact], package_root: str, tags: list[str], cgo_enabled: bool, with_tests: bool, asan: bool) -> Artifact: + go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] + env = get_toolchain_env_vars(go_toolchain) + env["GO111MODULE"] = "off" + env["CGO_ENABLED"] = "1" if cgo_enabled else "0" + + go_list_out = ctx.actions.declare_output(paths.basename(pkg_name) + "_go_list.json") + + # Create file structure that `go list` can recognize + # Use copied_dir, because embed doesn't work with symlinks + srcs_dir = ctx.actions.copied_dir( + "__{}_srcs_dir__".format(paths.basename(pkg_name)), + {src.short_path.removeprefix(package_root).lstrip("/"): src for src in srcs}, + ) + all_tags = [] + go_toolchain.tags + tags + if asan: + all_tags.append("asan") + + required_felds = "Name,Imports,GoFiles,CgoFiles,HFiles,CFiles,CXXFiles,SFiles,EmbedFiles,CgoCFLAGS,CgoCPPFLAGS,IgnoredGoFiles,IgnoredOtherFiles" + if with_tests: + required_felds += ",TestImports,XTestImports,TestGoFiles,XTestGoFiles" + + go_list_args = [ + go_toolchain.go_wrapper, + ["--go", go_toolchain.go], + ["--workdir", srcs_dir], + ["--output", go_list_out.as_output()], + "list", + "-e", + "-json=" + required_felds, + ["-tags", ",".join(all_tags) if all_tags else []], + ".", + ] + + identifier = paths.basename(pkg_name) + ctx.actions.run(go_list_args, env = env, category = "go_list", identifier = identifier) + + return go_list_out + +def parse_go_list_out(srcs: list[Artifact], package_root: str, go_list_out: ArtifactValue) -> GoListOut: + go_list = go_list_out.read_json() + go_files, cgo_files, h_files, c_files, cxx_files, s_files, test_go_files, x_test_go_files, ignored_go_files, ignored_other_files, embed_files = [], [], [], [], [], [], [], [], [], [], [] + + for src in srcs: + # remove package_root prefix from src artifact path to match `go list` output format + src_path = src.short_path.removeprefix(package_root).lstrip("/") + if src_path in go_list.get("GoFiles", []): + go_files.append(src) + if src_path in go_list.get("CgoFiles", []): + cgo_files.append(src) + if src_path in go_list.get("HFiles", []): + h_files.append(src) + if src_path in go_list.get("CFiles", []): + c_files.append(src) + if src_path in go_list.get("CXXFiles", []): + cxx_files.append(src) + if src_path in go_list.get("SFiles", []): + s_files.append(src) + if src_path in go_list.get("TestGoFiles", []): + test_go_files.append(src) + if src_path in go_list.get("XTestGoFiles", []): + x_test_go_files.append(src) + if src_path in 
go_list.get("IgnoredGoFiles", []): + ignored_go_files.append(src) + if src_path in go_list.get("IgnoredOtherFiles", []): + ignored_other_files.append(src) + if _any_starts_with(go_list.get("EmbedFiles", []), src_path): + embed_files.append(src) + + name = go_list.get("Name", "") + imports = go_list.get("Imports", []) + test_imports = go_list.get("TestImports", []) + x_test_imports = go_list.get("XTestImports", []) + cgo_cflags = go_list.get("CgoCFLAGS", []) + cgo_cppflags = go_list.get("CgoCPPFLAGS", []) + + return GoListOut( + name = name, + imports = imports, + test_imports = test_imports, + x_test_imports = x_test_imports, + go_files = go_files, + h_files = h_files, + c_files = c_files, + cxx_files = cxx_files, + cgo_files = cgo_files, + s_files = s_files, + test_go_files = test_go_files, + x_test_go_files = x_test_go_files, + embed_files = embed_files, + cgo_cflags = cgo_cflags, + cgo_cppflags = cgo_cppflags, + ignored_go_files = ignored_go_files, + ignored_other_files = ignored_other_files, + ) + +def _any_starts_with(files: list[str], path: str): + for file in files: + if paths.starts_with(file, path): + return True + + return False diff --git a/prelude/go/go_stdlib.bzl b/prelude/go/go_stdlib.bzl index 49414dc7ce9..afcf0794481 100644 --- a/prelude/go/go_stdlib.bzl +++ b/prelude/go/go_stdlib.bzl @@ -5,73 +5,86 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo") +load("@prelude//cxx:target_sdk_version.bzl", "get_target_sdk_version_flags") load(":packages.bzl", "GoStdlib") -load(":toolchain.bzl", "GoToolchainInfo", "evaluate_cgo_enabled", "get_toolchain_cmd_args") +load(":toolchain.bzl", "GoToolchainInfo", "evaluate_cgo_enabled", "get_toolchain_env_vars") def go_stdlib_impl(ctx: AnalysisContext) -> list[Provider]: go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] - stdlib_pkgdir = ctx.actions.declare_output("stdlib_pkgdir", dir = True) cgo_enabled = evaluate_cgo_enabled(go_toolchain, ctx.attrs._cgo_enabled) - tags = go_toolchain.tags + tags = [] + go_toolchain.tags linker_flags = [] + go_toolchain.linker_flags assembler_flags = [] + go_toolchain.assembler_flags compiler_flags = [] + go_toolchain.compiler_flags compiler_flags += ["-buildid="] # Make builds reproducible. 
- if ctx.attrs._compile_shared: - assembler_flags += ["-shared"] - compiler_flags += ["-shared"] - go_wrapper_args = [] - cxx_toolchain = go_toolchain.cxx_toolchain_for_linking - if cxx_toolchain != None: - c_compiler = cxx_toolchain.c_compiler_info + if ctx.attrs._asan: + compiler_flags += ["-asan"] + tags += ["asan"] - cgo_ldflags = cmd_args( - cxx_toolchain.linker_info.linker_flags, - go_toolchain.external_linker_flags, - ) + env = get_toolchain_env_vars(go_toolchain) + env["GODEBUG"] = "installgoroot=all" + env["CGO_ENABLED"] = "1" if cgo_enabled else "0" - go_wrapper_args += [ - cmd_args(c_compiler.compiler, format = "--cc={}").absolute_prefix("%cwd%/"), - cmd_args([c_compiler.compiler_flags, go_toolchain.c_compiler_flags], format = "--cgo_cflags={}").absolute_prefix("%cwd%/"), - cmd_args(c_compiler.preprocessor_flags, format = "--cgo_cppflags={}").absolute_prefix("%cwd%/"), - cmd_args(cgo_ldflags, format = "--cgo_ldflags={}").absolute_prefix("%cwd%/"), - ] + cxx_toolchain = ctx.attrs._cxx_toolchain[CxxToolchainInfo] + if cgo_enabled and cxx_toolchain != None: + c_compiler = cxx_toolchain.c_compiler_info + cflags = cmd_args(c_compiler.compiler_flags, delimiter = "\t", absolute_prefix = "%cwd%/") + cflags.add(cmd_args(get_target_sdk_version_flags(ctx), delimiter = "\t")) + env["CC"] = cmd_args(c_compiler.compiler, delimiter = "\t", absolute_prefix = "%cwd%/") + env["CGO_CFLAGS"] = cflags + env["CGO_CPPFLAGS"] = cmd_args(c_compiler.preprocessor_flags, delimiter = "\t", absolute_prefix = "%cwd%/") - cmd = get_toolchain_cmd_args(go_toolchain, go_root = True) - cmd.add([ - "GODEBUG={}".format("installgoroot=all"), - "CGO_ENABLED={}".format("1" if cgo_enabled else "0"), - go_toolchain.go_wrapper, - go_toolchain.go, - go_wrapper_args, - "install", - "-pkgdir", - stdlib_pkgdir.as_output(), - cmd_args(["-asmflags=", cmd_args(assembler_flags, delimiter = " ")], delimiter = "") if assembler_flags else [], - cmd_args(["-gcflags=", cmd_args(compiler_flags, delimiter = " ")], delimiter = "") if compiler_flags else [], - cmd_args(["-ldflags=", cmd_args(linker_flags, delimiter = " ")], delimiter = "") if linker_flags else [], - ["-tags", ",".join(tags)] if tags else [], - ["-race"] if ctx.attrs._race else [], - "std", - ]) + importcfg = ctx.actions.declare_output("stdlib.importcfg") + importcfg_shared = ctx.actions.declare_output("stdlib_shared.importcfg") + stdlib_pkgdir = ctx.actions.declare_output("stdlib_pkgdir", dir = True) + stdlib_pkgdir_shared = ctx.actions.declare_output("stdlib_pkgdir_shared", dir = True) - ctx.actions.run(cmd, category = "go_build_stdlib", identifier = "go_build_stdlib") + def build_variant(out: Artifact, shared: bool) -> cmd_args: + local_assembler_flags = [] + assembler_flags + local_compiler_flags = [] + compiler_flags + if shared: + local_assembler_flags += ["-shared"] + local_compiler_flags += ["-shared"] + return cmd_args([ + go_toolchain.go_wrapper, + ["--go", go_toolchain.go], + "install", + "-pkgdir", + out.as_output(), + cmd_args(["-asmflags=", cmd_args(local_assembler_flags, delimiter = " ")], delimiter = "") if local_assembler_flags else [], + cmd_args(["-gcflags=", cmd_args(local_compiler_flags, delimiter = " ")], delimiter = "") if local_compiler_flags else [], + cmd_args(["-ldflags=", cmd_args(linker_flags, delimiter = " ")], delimiter = "") if linker_flags else [], + ["-tags", ",".join(tags)] if tags else [], + ["-race"] if ctx.attrs._race else [], + "std", + ]) + + ctx.actions.run(build_variant(stdlib_pkgdir, False), env = env, category = 
"go_build_stdlib", identifier = "go_build_stdlib") + ctx.actions.run(build_variant(stdlib_pkgdir_shared, True), env = env, category = "go_build_stdlib", identifier = "go_build_stdlib_shared") - importcfg = ctx.actions.declare_output("stdlib.importcfg") ctx.actions.run( [ go_toolchain.gen_stdlib_importcfg, - "--stdlib", - stdlib_pkgdir, - "--output", - importcfg.as_output(), + ["--stdlib", stdlib_pkgdir], + ["--output", importcfg.as_output()], ], category = "go_gen_stdlib_importcfg", identifier = "go_gen_stdlib_importcfg", ) + ctx.actions.run( + [ + go_toolchain.gen_stdlib_importcfg, + ["--stdlib", stdlib_pkgdir_shared], + ["--output", importcfg_shared.as_output()], + ], + category = "go_gen_stdlib_importcfg", + identifier = "go_gen_stdlib_importcfg_shared", + ) + return [ DefaultInfo(default_output = stdlib_pkgdir), - GoStdlib(pkgdir = stdlib_pkgdir, importcfg = importcfg), + GoStdlib(pkgdir = stdlib_pkgdir, importcfg = importcfg, pkgdir_shared = stdlib_pkgdir_shared, importcfg_shared = importcfg_shared), ] diff --git a/prelude/go/go_test.bzl b/prelude/go/go_test.bzl index 2c8759d2846..78b8f0ad133 100644 --- a/prelude/go/go_test.bzl +++ b/prelude/go/go_test.bzl @@ -19,10 +19,12 @@ load( "value_or", ) load("@prelude//test/inject_test_run_info.bzl", "inject_test_run_info") -load(":compile.bzl", "GoTestInfo", "compile", "get_filtered_srcs", "get_inherited_compile_pkgs") +load(":compile.bzl", "GoTestInfo", "get_inherited_compile_pkgs") load(":coverage.bzl", "GoCoverageMode") load(":link.bzl", "link") +load(":package_builder.bzl", "build_package") load(":packages.bzl", "go_attr_pkg_name") +load(":toolchain.bzl", "GoToolchainInfo", "evaluate_cgo_enabled") def _gen_test_main( ctx: AnalysisContext, @@ -34,25 +36,27 @@ def _gen_test_main( Generate a `main.go` which calls tests from the given sources. """ output = ctx.actions.declare_output("main.go") - cmd = cmd_args() - cmd.add(ctx.attrs._testmaingen[RunInfo]) + cmd = [] + cmd.append(ctx.attrs._testmaingen[RunInfo]) # if ctx.attrs.coverage_mode: - # cmd.add(cmd_args(ctx.attrs.coverage_mode, format = "--cover-mode={}")) - cmd.add(cmd_args(output.as_output(), format = "--output={}")) - cmd.add(cmd_args(pkg_name, format = "--import-path={}")) + # cmd.append(cmd_args(ctx.attrs.coverage_mode, format = "--cover-mode={}")) + cmd.append(cmd_args(output.as_output(), format = "--output={}")) + cmd.append(cmd_args(pkg_name, format = "--import-path={}")) if coverage_mode != None: - cmd.add("--cover-mode", coverage_mode.value) + cmd.extend(["--cover-mode", coverage_mode.value]) for _, vars in coverage_vars.items(): - cmd.add(vars) - cmd.add(srcs) - ctx.actions.run(cmd, category = "go_test_main_gen") + cmd.append(vars) + cmd.append(srcs) + ctx.actions.run(cmd_args(cmd), category = "go_test_main_gen") return output def is_subpackage_of(other_pkg_name: str, pkg_name: str) -> bool: return pkg_name == other_pkg_name or other_pkg_name.startswith(pkg_name + "/") def go_test_impl(ctx: AnalysisContext) -> list[Provider]: + go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] + deps = ctx.attrs.deps srcs = ctx.attrs.srcs pkg_name = go_attr_pkg_name(ctx) @@ -67,8 +71,6 @@ def go_test_impl(ctx: AnalysisContext) -> list[Provider]: # TODO: should we assert that pkg_name != None here? pkg_name = lib.pkg_name - srcs = get_filtered_srcs(ctx, srcs, tests = True) - # If coverage is enabled for this test, we need to preprocess the sources # with the Go cover tool. 
coverage_mode = GoCoverageMode(ctx.attrs._coverage_mode) if ctx.attrs._coverage_mode else None @@ -76,15 +78,21 @@ def go_test_impl(ctx: AnalysisContext) -> list[Provider]: pkgs = {} # Compile all tests into a package. - tests = compile( + tests, tests_pkg_info = build_package( ctx, pkg_name, - srcs, + srcs = srcs, + package_root = ctx.attrs.package_root, deps = deps, pkgs = pkgs, - compile_flags = ctx.attrs.compiler_flags, + compiler_flags = ctx.attrs.compiler_flags, + tags = ctx.attrs._tags, coverage_mode = coverage_mode, race = ctx.attrs._race, + asan = ctx.attrs._asan, + embedcfg = ctx.attrs.embedcfg, + tests = True, + cgo_enabled = evaluate_cgo_enabled(go_toolchain, ctx.attrs.cgo_enabled), ) if coverage_mode != None: @@ -94,32 +102,34 @@ def go_test_impl(ctx: AnalysisContext) -> list[Provider]: for name, pkg in get_inherited_compile_pkgs(deps).items(): if ctx.label != None and is_subpackage_of(name, ctx.label.package): coverage_vars[name] = pkg.coverage_vars - pkgs[name] = pkg.pkg + pkgs[name] = pkg - pkgs[pkg_name] = tests.pkg + pkgs[pkg_name] = tests # Generate a main function which runs the tests and build that into another # package. - gen_main = _gen_test_main(ctx, pkg_name, coverage_mode, coverage_vars, srcs) - main = compile(ctx, "main", cmd_args(gen_main), pkgs = pkgs, coverage_mode = coverage_mode, race = ctx.attrs._race) + gen_main = _gen_test_main(ctx, pkg_name, coverage_mode, coverage_vars, tests.srcs_list) + main, _ = build_package(ctx, "main", [gen_main], package_root = "", pkgs = pkgs, coverage_mode = coverage_mode, race = ctx.attrs._race, asan = ctx.attrs._asan, cgo_gen_dir_name = "cgo_gen_test_main") # Link the above into a Go binary. (bin, runtime_files, external_debug_info) = link( ctx = ctx, - main = main.pkg, + main = main, pkgs = pkgs, deps = deps, link_style = value_or(map_val(LinkStyle, ctx.attrs.link_style), LinkStyle("static")), linker_flags = ctx.attrs.linker_flags, - shared = False, race = ctx.attrs._race, + asan = ctx.attrs._asan, + external_linker_flags = ctx.attrs.external_linker_flags, ) - run_cmd = cmd_args(bin).hidden(runtime_files, external_debug_info) - # As per v1, copy in resources next to binary. + copied_resources = [] for resource in ctx.attrs.resources: - run_cmd.hidden(ctx.actions.copy_file(resource.short_path, resource)) + copied_resources.append(ctx.actions.copy_file(resource.short_path, resource)) + + run_cmd = cmd_args(bin, hidden = [runtime_files, external_debug_info] + copied_resources) # Setup RE executors based on the `remote_execution` param. 
re_executor, executor_overrides = get_re_executors_from_props(ctx) @@ -143,4 +153,5 @@ def go_test_impl(ctx: AnalysisContext) -> list[Provider]: default_output = bin, other_outputs = [gen_main] + runtime_files + external_debug_info, ), + tests_pkg_info, ] diff --git a/prelude/go/link.bzl b/prelude/go/link.bzl index 600c5d51f67..1b66de1176f 100644 --- a/prelude/go/link.bzl +++ b/prelude/go/link.bzl @@ -12,6 +12,8 @@ load( "executable_shared_lib_arguments", "make_link_args", ) +load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo") +load("@prelude//cxx:linker.bzl", "get_default_shared_library_name", "get_shared_library_name_linker_flags") load( "@prelude//linking:link_info.bzl", "LinkStyle", @@ -24,19 +26,19 @@ load( "merge_shared_libraries", "traverse_shared_library_info", ) +load("@prelude//linking:stamp_build_info.bzl", "stamp_build_info") load("@prelude//os_lookup:defs.bzl", "OsLookup") load( "@prelude//utils:utils.bzl", - "map_idx", + "filter_and_map_idx", ) load( ":packages.bzl", "GoPkg", # @Unused used as type "make_importcfg", "merge_pkgs", - "pkg_artifacts", ) -load(":toolchain.bzl", "GoToolchainInfo", "get_toolchain_cmd_args") +load(":toolchain.bzl", "GoToolchainInfo", "get_toolchain_env_vars") # Provider wrapping packages used for linking. GoPkgLinkInfo = provider(fields = { @@ -44,8 +46,9 @@ GoPkgLinkInfo = provider(fields = { }) GoBuildMode = enum( - "executable", - "c_shared", + "executable", # non-pic executable + "c_shared", # pic C-shared library + "c_archive", # pic C-static library ) def _build_mode_param(mode: GoBuildMode) -> str: @@ -53,6 +56,8 @@ def _build_mode_param(mode: GoBuildMode) -> str: return "exe" if mode == GoBuildMode("c_shared"): return "c-shared" + if mode == GoBuildMode("c_archive"): + return "c-archive" fail("unexpected: {}", mode) def get_inherited_link_pkgs(deps: list[Dependency]) -> dict[str, GoPkg]: @@ -77,42 +82,52 @@ def _process_shared_dependencies( shlib_info = merge_shared_libraries( ctx.actions, - deps = filter(None, map_idx(SharedLibraryInfo, deps)), + deps = filter_and_map_idx(SharedLibraryInfo, deps), ) - shared_libs = {} - for name, shared_lib in traverse_shared_library_info(shlib_info).items(): - shared_libs[name] = shared_lib.lib + shared_libs = traverse_shared_library_info(shlib_info) return executable_shared_lib_arguments( ctx, - ctx.attrs._go_toolchain[GoToolchainInfo].cxx_toolchain_for_linking, + ctx.attrs._cxx_toolchain[CxxToolchainInfo], artifact, shared_libs, ) def link( ctx: AnalysisContext, - main: Artifact, - pkgs: dict[str, Artifact] = {}, + main: GoPkg, + pkgs: dict[str, GoPkg] = {}, deps: list[Dependency] = [], build_mode: GoBuildMode = GoBuildMode("executable"), link_mode: [str, None] = None, link_style: LinkStyle = LinkStyle("static"), linker_flags: list[typing.Any] = [], external_linker_flags: list[typing.Any] = [], - shared: bool = False, - race: bool = False): + race: bool = False, + asan: bool = False): go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] if go_toolchain.env_go_os == "windows": executable_extension = ".exe" shared_extension = ".dll" + archive_extension = ".lib" else: executable_extension = "" shared_extension = ".so" - file_extension = shared_extension if build_mode == GoBuildMode("c_shared") else executable_extension - output = ctx.actions.declare_output(ctx.label.name + file_extension) - - cmd = get_toolchain_cmd_args(go_toolchain) + archive_extension = ".a" + + if build_mode == GoBuildMode("c_shared"): + file_extension = shared_extension + use_shared_code = True # PIC + elif build_mode 
== GoBuildMode("c_archive"): + file_extension = archive_extension + use_shared_code = True # PIC + else: # GoBuildMode("executable") + file_extension = executable_extension + use_shared_code = False # non-PIC + final_output_name = ctx.label.name + file_extension + output = ctx.actions.declare_output(ctx.label.name + "-tmp" + file_extension) + + cmd = cmd_args() cmd.add(go_toolchain.linker) cmd.add(go_toolchain.linker_flags) @@ -124,13 +139,18 @@ def link( if race: cmd.add("-race") + if asan: + cmd.add("-asan") + # Add inherited Go pkgs to library search path. all_pkgs = merge_pkgs([ pkgs, - pkg_artifacts(get_inherited_link_pkgs(deps)), + get_inherited_link_pkgs(deps), ]) - importcfg = make_importcfg(ctx, "", all_pkgs, with_importmap = False) + identifier_prefix = ctx.label.name + "_" + _build_mode_param(build_mode) + + importcfg = make_importcfg(ctx, identifier_prefix, all_pkgs, use_shared_code) cmd.add("-importcfg", importcfg) @@ -139,35 +159,38 @@ def link( if link_mode == None: if build_mode == GoBuildMode("c_shared"): link_mode = "external" - elif shared: + if build_mode == GoBuildMode("c_archive"): link_mode = "external" if link_mode != None: cmd.add("-linkmode", link_mode) - cxx_toolchain = go_toolchain.cxx_toolchain_for_linking - if cxx_toolchain == None and link_mode == "external": - fail("cxx_toolchain required for link_mode='external'") + cxx_toolchain = ctx.attrs._cxx_toolchain[CxxToolchainInfo] if cxx_toolchain != None: is_win = ctx.attrs._exec_os_type[OsLookup].platform == "windows" # Gather external link args from deps. ext_links = get_link_args_for_strategy(ctx, cxx_inherited_link_info(deps), to_link_strategy(link_style)) ext_link_args_output = make_link_args( + ctx, ctx.actions, cxx_toolchain, [ext_links], ) - ext_link_args = cmd_args() + ext_link_args = cmd_args(hidden = ext_link_args_output.hidden) ext_link_args.add(cmd_args(executable_args.extra_link_args, quote = "shell")) ext_link_args.add(external_linker_flags) ext_link_args.add(ext_link_args_output.link_args) - ext_link_args.hidden(ext_link_args_output.hidden) + + if build_mode == GoBuildMode("c_shared") and go_toolchain.env_go_os != "windows": + soname = get_default_shared_library_name(cxx_toolchain.linker_info, ctx.label) + soname_flags = get_shared_library_name_linker_flags(cxx_toolchain.linker_info.type, soname) + ext_link_args.add(soname_flags) # Delegate to C++ linker... # TODO: It feels a bit inefficient to generate a wrapper file for every # link. Is there some way to etract the first arg of `RunInfo`? Or maybe - # we can generate te platform-specific stuff once and re-use? + # we can generate the platform-specific stuff once and re-use? 
cxx_link_cmd = cmd_args( [ cxx_toolchain.linker_info.linker, @@ -177,22 +200,29 @@ def link( delimiter = " ", ) linker_wrapper, _ = ctx.actions.write( - "__{}_cxx_link_wrapper__.{}".format(ctx.label.name, "bat" if is_win else "sh"), + "__{}_cxx_link_wrapper__.{}".format(identifier_prefix, "bat" if is_win else "sh"), ([] if is_win else ["#!/bin/sh"]) + [cxx_link_cmd], allow_args = True, is_executable = True, ) - cmd.add("-extld", linker_wrapper).hidden(cxx_link_cmd) + cmd.add("-extld", linker_wrapper, cmd_args(hidden = cxx_link_cmd)) cmd.add("-extldflags", cmd_args( cxx_toolchain.linker_info.linker_flags, go_toolchain.external_linker_flags, delimiter = " ", + quote = "shell", )) cmd.add(linker_flags) - cmd.add(main) + cmd.add(main.pkg_shared if use_shared_code else main.pkg) + + env = get_toolchain_env_vars(go_toolchain) + + ctx.actions.run(cmd, env = env, category = "go_link", identifier = identifier_prefix) + + output = stamp_build_info(ctx, output) - ctx.actions.run(cmd, category = "go_link") + final_output = ctx.actions.copy_file(final_output_name, output) - return (output, executable_args.runtime_files, executable_args.external_debug_info) + return (final_output, executable_args.runtime_files, executable_args.external_debug_info) diff --git a/prelude/go/package_builder.bzl b/prelude/go/package_builder.bzl new file mode 100644 index 00000000000..f7b6d3ab13a --- /dev/null +++ b/prelude/go/package_builder.bzl @@ -0,0 +1,280 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load("@prelude//:paths.bzl", "paths") +load("@prelude//utils:utils.bzl", "dedupe_by_value") +load(":cgo_builder.bzl", "build_cgo") +load(":compile.bzl", "get_inherited_compile_pkgs", "infer_package_root") +load( + ":coverage.bzl", + "GoCoverageMode", # @Unused used as type +) +load(":go_list.bzl", "go_list", "parse_go_list_out") +load(":packages.bzl", "GoPackageInfo", "GoPkg", "make_importcfg", "merge_pkgs") +load(":toolchain.bzl", "GoToolchainInfo", "get_toolchain_env_vars") + +def build_package( + ctx: AnalysisContext, + pkg_name: str, + srcs: list[Artifact], + package_root: str | None, + pkgs: dict[str, GoPkg] = {}, + deps: list[Dependency] = [], + compiler_flags: list[str] = [], + assembler_flags: list[str] = [], + tags: list[str] = [], + race: bool = False, + asan: bool = False, + cgo_enabled: bool = False, + coverage_mode: GoCoverageMode | None = None, + embedcfg: Artifact | None = None, + tests: bool = False, + cgo_gen_dir_name: str = "cgo_gen") -> (GoPkg, GoPackageInfo): + if race and coverage_mode not in [None, GoCoverageMode("atomic")]: + fail("`coverage_mode` must be `atomic` when `race=True`") + + out = ctx.actions.declare_output(paths.basename(pkg_name) + ".a") + out_shared = ctx.actions.declare_output(paths.basename(pkg_name) + "_shared.a") + + cgo_gen_dir = ctx.actions.declare_output(cgo_gen_dir_name, dir = True) + + srcs = dedupe_by_value(srcs) + + package_root = package_root if package_root != None else infer_package_root(srcs) + + go_list_out = go_list(ctx, pkg_name, srcs, package_root, tags, cgo_enabled, with_tests = tests, asan = asan) + + srcs_list_argsfile = ctx.actions.declare_output(paths.basename(pkg_name) + "_srcs_list.go_package_argsfile") + coverage_vars_argsfile = ctx.actions.declare_output(paths.basename(pkg_name) + 
"_coverage_vars.go_package_argsfile") + dynamic_outputs = [out, out_shared, srcs_list_argsfile, coverage_vars_argsfile, cgo_gen_dir] + + all_pkgs = merge_pkgs([ + pkgs, + get_inherited_compile_pkgs(deps), + ]) + + def f(ctx: AnalysisContext, artifacts, outputs, go_list_out = go_list_out): + go_list = parse_go_list_out(srcs, package_root, artifacts[go_list_out]) + + # Generate CGO and C sources. + cgo_go_files, cgo_o_files, cgo_gen_tmp_dir = build_cgo(ctx, go_list.cgo_files, go_list.h_files, go_list.c_files + go_list.cxx_files, go_list.cgo_cflags, go_list.cgo_cppflags) + ctx.actions.copy_dir(outputs[cgo_gen_dir], cgo_gen_tmp_dir) + + src_list_for_argsfile = go_list.go_files + (go_list.test_go_files + go_list.x_test_go_files if tests else []) + ctx.actions.write(outputs[srcs_list_argsfile], cmd_args(src_list_for_argsfile, "")) + + go_files = go_list.go_files + cgo_go_files + covered_go_files, coverage_vars_out = _cover(ctx, pkg_name, go_files, coverage_mode) + ctx.actions.write(outputs[coverage_vars_argsfile], coverage_vars_out) + + symabis = _symabis(ctx, pkg_name, go_list.s_files, assembler_flags) + + def build_variant(shared: bool) -> Artifact: + suffix = "__shared" if shared else "" # suffix to make artifacts unique + go_files_to_compile = covered_go_files + ((go_list.test_go_files + go_list.x_test_go_files) if tests else []) + importcfg = make_importcfg(ctx, pkg_name, all_pkgs, shared) + go_a_file, asmhdr = _compile(ctx, pkg_name, go_files_to_compile, importcfg, compiler_flags, shared, race, asan, suffix, embedcfg, go_list.embed_files, symabis, len(go_list.s_files) > 0) + + asm_o_files = _asssembly(ctx, pkg_name, go_list.s_files, asmhdr, assembler_flags, shared, suffix) + + return _pack(ctx, pkg_name, go_a_file, cgo_o_files + asm_o_files, suffix) + + ctx.actions.copy_file(outputs[out], build_variant(shared = False)) + ctx.actions.copy_file(outputs[out_shared], build_variant(shared = True)) + + ctx.actions.dynamic_output(dynamic = [go_list_out], inputs = [], outputs = [o.as_output() for o in dynamic_outputs], f = f) + + return GoPkg( + pkg = out, + pkg_shared = out_shared, + coverage_vars = cmd_args(coverage_vars_argsfile, format = "@{}"), + srcs_list = cmd_args(srcs_list_argsfile, format = "@{}", hidden = srcs), + ), GoPackageInfo( + build_out = out, + cgo_gen_dir = cgo_gen_dir, + package_name = pkg_name, + package_root = package_root, + go_list_out = go_list_out, + ) + +def _compile( + ctx: AnalysisContext, + pkg_name: str, + go_srcs: list[Artifact], + importcfg: cmd_args, + compiler_flags: list[str], + shared: bool, + race: bool, + asan: bool, + suffix: str, + embedcfg: Artifact | None = None, + embed_files: list[Artifact] = [], + symabis: Artifact | None = None, + gen_asmhdr: bool = False) -> (Artifact, Artifact | None): + go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] + + env = get_toolchain_env_vars(go_toolchain) + out = ctx.actions.declare_output("go_compile_out{}.a".format(suffix)) + + if len(go_srcs) == 0: + ctx.actions.write(out.as_output(), "") + return out, None + + asmhdr = ctx.actions.declare_output("__asmhdr__{}/go_asm.h".format(suffix)) if gen_asmhdr else None + + # Use argsfile to avoid command length limit on Windows + srcs_argsfile = ctx.actions.write(paths.basename(pkg_name) + suffix + "_srcs.go_package_argsfile", go_srcs) + + compile_cmd = cmd_args( + [ + go_toolchain.go_wrapper, + ["--go", go_toolchain.compiler], + "--", + go_toolchain.compiler_flags, + compiler_flags, + "-buildid=", + "-nolocalimports", + ["-trimpath", "%cwd%"], + ["-p", pkg_name], + 
["-importcfg", importcfg], + ["-o", out.as_output()], + ["-race"] if race else [], + ["-asan"] if asan else [], + ["-shared"] if shared else [], + ["-embedcfg", embedcfg] if embedcfg else [], + ["-symabis", symabis] if symabis else [], + ["-asmhdr", asmhdr.as_output()] if asmhdr else [], + cmd_args(srcs_argsfile, format = "@{}", hidden = go_srcs), + ], + hidden = embed_files, # files and directories should be available for embedding + ) + + identifier = paths.basename(pkg_name) + ctx.actions.run(compile_cmd, env = env, category = "go_compile", identifier = identifier + suffix) + + return (out, asmhdr) + +def _symabis(ctx: AnalysisContext, pkg_name: str, s_files: list[Artifact], assembler_flags: list[str]) -> Artifact | None: + if len(s_files) == 0: + return None + + go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] + env = get_toolchain_env_vars(go_toolchain) + + # we have to supply "go_asm.h" with any content to make asm tool happy + # its content doesn't matter if -gensymabis provided + # https://github.com/golang/go/blob/3f8f929d60a90c4e4e2b07c8d1972166c1a783b1/src/cmd/go/internal/work/gc.go#L441-L443 + fake_asmhdr = ctx.actions.write("__fake_asmhdr__/go_asm.h", "") + symabis = ctx.actions.declare_output("symabis") + asm_cmd = [ + go_toolchain.assembler, + go_toolchain.assembler_flags, + assembler_flags, + _asm_args(ctx, pkg_name, False), # flag -shared doesn't matter for symabis + "-gensymabis", + ["-o", symabis.as_output()], + ["-I", cmd_args(fake_asmhdr, parent = 1)], + s_files, + ] + + identifier = paths.basename(pkg_name) + ctx.actions.run(asm_cmd, env = env, category = "go_symabis", identifier = identifier) + + return symabis + +def _asssembly(ctx: AnalysisContext, pkg_name: str, s_files: list[Artifact], asmhdr: Artifact | None, assembler_flags: list[str], shared: bool, suffix: str) -> list[Artifact]: + if len(s_files) == 0: + return [] + + go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] + env = get_toolchain_env_vars(go_toolchain) + + o_files = [] + identifier = paths.basename(pkg_name) + for s_file in s_files: + o_file = ctx.actions.declare_output(s_file.short_path + suffix + ".o") + o_files.append(o_file) + + asm_cmd = [ + go_toolchain.assembler, + go_toolchain.assembler_flags, + assembler_flags, + _asm_args(ctx, pkg_name, shared), + ["-o", o_file.as_output()], + ["-I", cmd_args(asmhdr, parent = 1)] if asmhdr else [], # can it actually be None? 
+ s_file, + ] + + ctx.actions.run(asm_cmd, env = env, category = "go_assembly", identifier = identifier + "/" + s_file.short_path + suffix) + + return o_files + +def _pack(ctx: AnalysisContext, pkg_name: str, a_file: Artifact, o_files: list[Artifact], suffix: str) -> Artifact: + if len(o_files) == 0: + # no need to repack .a file, if there are no .o files + return a_file + + go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] + env = get_toolchain_env_vars(go_toolchain) + + pkg_file = ctx.actions.declare_output("pkg{}.a".format(suffix)) + + pack_cmd = [ + go_toolchain.packer, + "c", + pkg_file.as_output(), + a_file, + o_files, + ] + + identifier = paths.basename(pkg_name) + ctx.actions.run(pack_cmd, env = env, category = "go_pack", identifier = identifier + suffix) + + return pkg_file + +def _asm_args(ctx: AnalysisContext, pkg_name: str, shared: bool): + go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] + return [ + ["-p", pkg_name], + ["-I", cmd_args(go_toolchain.env_go_root, absolute_suffix = "/pkg/include")], + ["-D", "GOOS_" + go_toolchain.env_go_os] if go_toolchain.env_go_os else [], + ["-D", "GOARCH_" + go_toolchain.env_go_arch] if go_toolchain.env_go_arch else [], + ["-shared"] if shared else [], + ] + +def _cover(ctx: AnalysisContext, pkg_name: str, go_files: list[Artifact], coverage_mode: GoCoverageMode | None) -> (list[Artifact], str | cmd_args): + if coverage_mode == None: + return go_files, "" + + go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] + env = get_toolchain_env_vars(go_toolchain) + covered_files = [] + coverage_vars = {} + for go_file in go_files: + covered_file = ctx.actions.declare_output("with_coverage", go_file.short_path) + covered_files.append(covered_file) + + var = "Var_" + sha256(pkg_name + "::" + go_file.short_path) + coverage_vars[var] = go_file.short_path + + cover_cmd = [ + go_toolchain.cover, + ["-mode", coverage_mode.value], + ["-var", var], + ["-o", covered_file.as_output()], + go_file, + ] + + ctx.actions.run(cover_cmd, env = env, category = "go_cover", identifier = paths.basename(pkg_name) + "/" + go_file.short_path) + + coverage_vars_out = "" + if len(coverage_vars) > 0: + # convert coverage_vars to argsfile for compatibility with python implementation + cover_pkg = "{}:{}".format(pkg_name, ",".join(["{}={}".format(var, name) for var, name in coverage_vars.items()])) + coverage_vars_out = cmd_args("--cover-pkgs", cover_pkg) + + return covered_files, coverage_vars_out diff --git a/prelude/go/packages.bzl b/prelude/go/packages.bzl index a07ba3f4f14..b3148e055c4 100644 --- a/prelude/go/packages.bzl +++ b/prelude/go/packages.bzl @@ -5,19 +5,40 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
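For reference, the importcfg files assembled by `make_importcfg` below use the plain-text format consumed by `go tool compile` and `go tool link`; a minimal sketch, with hypothetical output paths:

    packagefile fmt=buck-out/go-stdlib/fmt.a
    packagefile example.com/util=buck-out/gen/util/pkg.a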
+load("@prelude//cxx:headers.bzl", "prepare_headers") +load( + "@prelude//cxx:preprocessor.bzl", + "CPreprocessor", + "CPreprocessorArgs", +) load("@prelude//go:toolchain.bzl", "GoToolchainInfo") load("@prelude//utils:utils.bzl", "value_or") +# Information about a package for GOPACKAGESDRIVER +GoPackageInfo = provider( + fields = { + "build_out": provider_field(Artifact), + "cgo_gen_dir": provider_field(Artifact), + "go_list_out": provider_field(Artifact), + "package_name": provider_field(str), + "package_root": provider_field(str), + }, +) + GoPkg = record( - cgo = field(bool, default = False), + # We have to produce allways shared (PIC) and non-shared (non-PIC) archives pkg = field(Artifact), - coverage_vars = field(cmd_args | None, default = None), + pkg_shared = field(Artifact), + coverage_vars = field(cmd_args), + srcs_list = field(cmd_args), ) GoStdlib = provider( fields = { "importcfg": provider_field(Artifact), + "importcfg_shared": provider_field(Artifact), "pkgdir": provider_field(Artifact), + "pkgdir_shared": provider_field(Artifact), }, ) @@ -27,7 +48,7 @@ def go_attr_pkg_name(ctx: AnalysisContext) -> str: """ return value_or(ctx.attrs.package_name, ctx.label.package) -def merge_pkgs(pkgss: list[dict[str, typing.Any]]) -> dict[str, typing.Any]: +def merge_pkgs(pkgss: list[dict[str, GoPkg]]) -> dict[str, GoPkg]: """ Merge mappings of packages into a single mapping, throwing an error on conflicts. @@ -36,54 +57,62 @@ def merge_pkgs(pkgss: list[dict[str, typing.Any]]) -> dict[str, typing.Any]: all_pkgs = {} for pkgs in pkgss: - for name, path in pkgs.items(): - if name in pkgs and pkgs[name] != path: - fail("conflict for package {!r}: {} and {}".format(name, path, all_pkgs[name])) - all_pkgs[name] = path + for name, pkg in pkgs.items(): + if name in all_pkgs and all_pkgs[name] != pkg: + fail("conflict for package {!r}: {} and {}".format(name, pkg, all_pkgs[name])) + all_pkgs[name] = pkg return all_pkgs -def pkg_artifacts(pkgs: dict[str, GoPkg]) -> dict[str, Artifact]: +def pkg_artifacts(pkgs: dict[str, GoPkg], shared: bool) -> dict[str, Artifact]: """ Return a map package name to a `shared` or `static` package artifact. """ return { - name: pkg.pkg + name: pkg.pkg_shared if shared else pkg.pkg for name, pkg in pkgs.items() } def make_importcfg( ctx: AnalysisContext, - pkg_name: str, - own_pkgs: dict[str, typing.Any], - with_importmap: bool) -> cmd_args: + prefix_name: str, + own_pkgs: dict[str, GoPkg], + shared: bool) -> cmd_args: go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] stdlib = ctx.attrs._go_stdlib[GoStdlib] + suffix = "__shared" if shared else "" # suffix to make artifacts unique content = [] - for name_, pkg_ in own_pkgs.items(): + pkg_artifacts_map = pkg_artifacts(own_pkgs, shared) + for name_, pkg_ in pkg_artifacts_map.items(): # Hack: we use cmd_args get "artifact" valid path and write it to a file. content.append(cmd_args("packagefile ", name_, "=", pkg_, delimiter = "")) - # Future work: support importmap in buck rules insted of hacking here. 
- if with_importmap and name_.startswith("third-party-source/go/"): - real_name_ = name_.removeprefix("third-party-source/go/") - content.append(cmd_args("importmap ", real_name_, "=", name_, delimiter = "")) - - own_importcfg = ctx.actions.declare_output("{}.importcfg".format(pkg_name)) + own_importcfg = ctx.actions.declare_output("{}{}.importcfg".format(prefix_name, suffix)) ctx.actions.write(own_importcfg, content) - final_importcfg = ctx.actions.declare_output("{}.final.importcfg".format(pkg_name)) + final_importcfg = ctx.actions.declare_output("{}{}.final.importcfg".format(prefix_name, suffix)) ctx.actions.run( [ go_toolchain.concat_files, "--output", final_importcfg.as_output(), - stdlib.importcfg, + stdlib.importcfg_shared if shared else stdlib.importcfg, own_importcfg, ], category = "concat_importcfgs", - identifier = pkg_name, + identifier = prefix_name + suffix, ) - return cmd_args(final_importcfg).hidden(stdlib.pkgdir).hidden(own_pkgs.values()) + return cmd_args(final_importcfg, hidden = [stdlib.pkgdir_shared if shared else stdlib.pkgdir, pkg_artifacts_map.values()]) + +# Return "_cgo_export.h" to expose exported C declarations to non-Go rules +def cgo_exported_preprocessor(ctx: AnalysisContext, pkg_info: GoPackageInfo) -> CPreprocessor: + return CPreprocessor(args = CPreprocessorArgs(args = [ + "-I", + prepare_headers( + ctx, + {"{}/{}.h".format(ctx.label.package, ctx.label.name): pkg_info.cgo_gen_dir.project("_cgo_export.h")}, + "cgo-exported-headers", + ).include_path, + ])) diff --git a/prelude/go/tags/constraints/BUCK.v2 b/prelude/go/tags/constraints/BUCK.v2 new file mode 100644 index 00000000000..38f5c36f82a --- /dev/null +++ b/prelude/go/tags/constraints/BUCK.v2 @@ -0,0 +1,8 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") +load(":defs.bzl", "generate_tag_constraints") + +oncall("build_infra") + +source_listing() + +generate_tag_constraints() diff --git a/prelude/go/constraints/defs.bzl b/prelude/go/tags/constraints/defs.bzl similarity index 85% rename from prelude/go/constraints/defs.bzl rename to prelude/go/tags/constraints/defs.bzl index 8da0b3e8103..92923c5f7a2 100644 --- a/prelude/go/constraints/defs.bzl +++ b/prelude/go/tags/constraints/defs.bzl @@ -10,14 +10,14 @@ load("@prelude//go/transitions:tags_helper.bzl", "allowed_tags") def generate_tag_constraints(): for tag in allowed_tags: - setting_name = "tag_" + tag + setting_name = "setting__" + tag native.constraint_setting( - name = "tag_" + tag, + name = setting_name, visibility = ["PUBLIC"], ) native.constraint_value( - name = setting_name + "__value", + name = tag, constraint_setting = ":" + setting_name, visibility = ["PUBLIC"], ) diff --git a/prelude/go/toolchain.bzl b/prelude/go/toolchain.bzl index 978b5e2ad9d..97a9c691d5f 100644 --- a/prelude/go/toolchain.bzl +++ b/prelude/go/toolchain.bzl @@ -5,77 +5,60 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
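For context, `_cgo_export.h` (surfaced by `cgo_exported_preprocessor` above) is the header `go tool cgo` generates for functions marked with `//export`; a minimal sketch of a Go source file that would populate it, with hypothetical names:

    package bridge

    import "C"

    //export AddInts
    func AddInts(a, b C.int) C.int {
        return a + b
    }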
-load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo") - GoToolchainInfo = provider( # @unsorted-dict-items fields = { "assembler": provider_field(RunInfo), "assembler_flags": provider_field(typing.Any, default = None), "c_compiler_flags": provider_field(typing.Any, default = None), - "cgo": provider_field(RunInfo | None, default = None), + "cgo": provider_field(RunInfo), "cgo_wrapper": provider_field(RunInfo), "gen_stdlib_importcfg": provider_field(RunInfo), "go_wrapper": provider_field(RunInfo), - "compile_wrapper": provider_field(RunInfo), "compiler": provider_field(RunInfo), "compiler_flags": provider_field(typing.Any, default = None), "concat_files": provider_field(RunInfo), "cover": provider_field(RunInfo), - "cover_srcs": provider_field(RunInfo), - "cxx_toolchain_for_linking": provider_field(CxxToolchainInfo | None, default = None), - "env_go_arch": provider_field(typing.Any, default = None), - "env_go_os": provider_field(typing.Any, default = None), - "env_go_arm": provider_field(typing.Any, default = None), + "default_cgo_enabled": provider_field(bool, default = False), + "env_go_arch": provider_field(str), + "env_go_os": provider_field(str), + "env_go_arm": provider_field(str | None, default = None), "env_go_root": provider_field(typing.Any, default = None), "env_go_debug": provider_field(dict[str, str], default = {}), "external_linker_flags": provider_field(typing.Any, default = None), - "filter_srcs": provider_field(RunInfo), "go": provider_field(RunInfo), "linker": provider_field(RunInfo), "linker_flags": provider_field(typing.Any, default = None), "packer": provider_field(RunInfo), - "tags": provider_field(typing.Any, default = None), + "tags": provider_field(list[str], default = []), }, ) -def get_toolchain_cmd_args(toolchain: GoToolchainInfo, go_root = True, force_disable_cgo = False) -> cmd_args: - cmd = cmd_args("env") - - # opt-out from Go1.20 coverage redisign - cmd.add("GOEXPERIMENT=nocoverageredesign") +def get_toolchain_env_vars(toolchain: GoToolchainInfo) -> dict[str, str | cmd_args]: + env = { + "GOARCH": toolchain.env_go_arch, + # opt-out from Go1.20 coverage redesign + "GOEXPERIMENT": "nocoverageredesign", + "GOOS": toolchain.env_go_os, + } - if toolchain.env_go_arch != None: - cmd.add("GOARCH={}".format(toolchain.env_go_arch)) - if toolchain.env_go_os != None: - cmd.add("GOOS={}".format(toolchain.env_go_os)) if toolchain.env_go_arm != None: - cmd.add("GOARM={}".format(toolchain.env_go_arm)) - if go_root and toolchain.env_go_root != None: - cmd.add(cmd_args(toolchain.env_go_root, format = "GOROOT={}")) + env["GOARM"] = toolchain.env_go_arm + if toolchain.env_go_root != None: + env["GOROOT"] = toolchain.env_go_root if toolchain.env_go_debug: godebug = ",".join(["{}={}".format(k, v) for k, v in toolchain.env_go_debug.items()]) - cmd.add("GODEBUG={}".format(godebug)) - if force_disable_cgo: - cmd.add("CGO_ENABLED=0") - else: - # CGO is enabled by default for native compilation, but we need to set it - # explicitly for cross-builds: - # https://go-review.googlesource.com/c/go/+/12603/2/src/cmd/cgo/doc.go - if toolchain.cgo != None: - cmd.add("CGO_ENABLED=1") - - return cmd + env["GODEBUG"] = godebug -# Sets default value of cgo_enabled attribute based on the presence of C++ toolchain. -def evaluate_cgo_enabled(toolchain: GoToolchainInfo, cgo_enabled: [bool, None]) -> bool: - cxx_toolchain_available = toolchain.cxx_toolchain_for_linking != None + return env - if cgo_enabled and not cxx_toolchain_available: - fail("Cgo requires a C++ toolchain. 
Set cgo_enabled=None|False.") +# Sets the default value of the cgo_enabled attribute based on the default_cgo_enabled attribute of GoToolchainInfo +def evaluate_cgo_enabled(toolchain: GoToolchainInfo, cgo_enabled: [bool, None], override_cgo_enabled: [bool, None] = None) -> bool: + if override_cgo_enabled != None: + return override_cgo_enabled if cgo_enabled != None: return cgo_enabled - # Return True if cxx_toolchain availabe for current configuration, otherwiese to False. - return cxx_toolchain_available + # Sadly, we can't check whether a cxx_toolchain is available, because it's always set, even when it doesn't make sense + return toolchain.default_cgo_enabled diff --git a/prelude/go/tools/BUCK.v2 b/prelude/go/tools/BUCK.v2 index 92b006f5bfc..b83c397be2a 100644 --- a/prelude/go/tools/BUCK.v2 +++ b/prelude/go/tools/BUCK.v2 @@ -1,28 +1,10 @@ -prelude = native -prelude.python_bootstrap_binary( - name = "compile_wrapper", - main = "compile_wrapper.py", - visibility = ["PUBLIC"], -) +load("@prelude//utils:source_listing.bzl", "source_listing") -prelude.python_bootstrap_binary( - name = "concat_files", - main = "concat_files.py", - visibility = ["PUBLIC"], -) +oncall("build_infra") -prelude.python_bootstrap_binary( - name = "cover_srcs", - main = "cover_srcs.py", - visibility = ["PUBLIC"], -) - -prelude.python_bootstrap_binary( - name = "filter_srcs", - main = "filter_srcs.py", - visibility = ["PUBLIC"], -) +source_listing() prelude = native prelude.python_bootstrap_binary( name = "cgo_wrapper", @@ -37,21 +19,11 @@ prelude.python_bootstrap_binary( ) prelude.python_bootstrap_binary( - name = "go_wrapper", + name = "go_wrapper_py", main = "go_wrapper.py", visibility = ["PUBLIC"], ) -prelude.go_binary( - name = "testmaingen", - srcs = [ - "testmaingen.go", - ], - visibility = [ - "PUBLIC", - ], -) - prelude.go_stdlib( name = "stdlib", visibility = ["PUBLIC"], diff --git a/prelude/go/tools/cgo_wrapper.py b/prelude/go/tools/cgo_wrapper.py index 38e4b845fbd..44f98ab6354 100644 --- a/prelude/go/tools/cgo_wrapper.py +++ b/prelude/go/tools/cgo_wrapper.py @@ -12,7 +12,6 @@ import os import subprocess import sys -import tempfile from pathlib import Path @@ -20,34 +19,20 @@ def main(argv): parser = argparse.ArgumentParser(fromfile_prefix_chars="@") parser.add_argument("--cgo", action="append", default=[]) parser.add_argument("--output", required=True, type=Path) - parser.add_argument("--cpp", action="append", default=[]) - parser.add_argument("--env-cc", action="append", default=[]) - parser.add_argument("--env-ldflags", action="append", default=[]) parser.add_argument("srcs", type=Path, nargs="*") args = parser.parse_args(argv[1:]) output = args.output.resolve(strict=False) + # The only reason we need this wrapper is to create `-objdir`, + # because neither `go tool cgo` nor buck can create it.
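+ # `go tool cgo` then fills this directory with the generated sources: _cgo_gotypes.go, the per-file *.cgo1.go / *.cgo2.c pairs, and _cgo_export.{c,h}.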
os.makedirs(output, exist_ok=True) env = os.environ.copy() - env["CC"] = " ".join(args.env_cc) - env["CGO_LDFLAGS"] = " ".join(args.env_ldflags) cmd = [] cmd.extend(args.cgo) - # cmd.append("-importpath={}") - # cmd.append("-srcdir={}") cmd.append(f"-objdir={output}") - # cmd.append(cgoCompilerFlags) cmd.append("--") - # cmd.append(cxxCompilerFlags) - - if args.cpp: - with tempfile.NamedTemporaryFile("w", delete=False) as argsfile: - for arg in args.cpp: - print(arg, file=argsfile) - argsfile.flush() - cmd.append("@" + argsfile.name) cmd.extend(args.srcs) return subprocess.call(cmd, env=env) diff --git a/prelude/go/tools/compile_wrapper.py b/prelude/go/tools/compile_wrapper.py deleted file mode 100755 index b6b743e04ab..00000000000 --- a/prelude/go/tools/compile_wrapper.py +++ /dev/null @@ -1,139 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under both the MIT license found in the -# LICENSE-MIT file in the root directory of this source tree and the Apache -# License, Version 2.0 found in the LICENSE-APACHE file in the root directory -# of this source tree. - -""" -Compile the given Go sources into a Go package. - -Example: - - $ ./compile_wrapper.py \ - --compiler compile \ - --assembler assemble \ - --output srcs.txt src/dir/ - -""" - -# pyre-unsafe - -import argparse -import contextlib -import os -import subprocess -import sys -import tempfile -from pathlib import Path -from typing import List - - -def _call_or_exit(cmd: List[str]): - ret = subprocess.call(cmd) - if ret != 0: - sys.exit(ret) - - -def _compile(compile_prefix: List[str], output: Path, srcs: List[Path]): - cmd = [] - cmd.extend(compile_prefix) - cmd.append("-trimpath={}".format(os.getcwd())) - cmd.append("-o") - cmd.append(output) - cmd.extend(srcs) - _call_or_exit(cmd) - - -def _pack(pack_prefix: List[str], output: Path, items: List[Path]): - cmd = [] - cmd.extend(pack_prefix) - cmd.append("r") - cmd.append(output) - cmd.extend(items) - _call_or_exit(cmd) - - -def main(argv): - parser = argparse.ArgumentParser(fromfile_prefix_chars="@") - parser.add_argument("--compiler", action="append", default=[]) - parser.add_argument("--assembler", action="append", default=[]) - parser.add_argument("--packer", action="append", default=[]) - parser.add_argument("--embedcfg", type=Path, default=None) - parser.add_argument("--output", required=True, type=Path) - parser.add_argument("srcs", type=Path, nargs="*") - args = parser.parse_args(argv[1:]) - - # If there's no srcs, just leave an empty file. - if not args.srcs: - args.output.touch() - return - - # go:embed does not parse symlinks, so following the links to the real paths - real_srcs = [s.resolve() for s in args.srcs] - - go_files = [s for s in real_srcs if s.suffix == ".go"] - s_files = [s for s in real_srcs if s.suffix == ".s"] - o_files = [s for s in real_srcs if s.suffix in (".o", ".obj")] - - with contextlib.ExitStack() as stack: - - asmhdr_dir = None - - assemble_prefix = [] - assemble_prefix.extend(args.assembler) - - if go_files: - compile_prefix = [] - compile_prefix.extend(args.compiler) - - # If we have assembly files, generate the symabi file to compile - # against, and the asm header. 
- if s_files: - asmhdr_dir = stack.push(tempfile.TemporaryDirectory()) - - asmhdr = Path(asmhdr_dir.name) / "go_asm.h" - asmhdr.touch() - compile_prefix.extend(["-asmhdr", asmhdr]) - assemble_prefix.extend(["-I", asmhdr_dir.name]) - assemble_prefix.extend( - ["-I", os.path.join(os.environ["GOROOT"], "pkg", "include")] - ) - assemble_prefix.extend(["-D", f"GOOS_{os.environ['GOOS']}"]) - assemble_prefix.extend(["-D", f"GOARCH_{os.environ['GOARCH']}"]) - if "GOAMD64" in os.environ and os.environ["GOARCH"] == "amd64": - assemble_prefix.extend(["-D", f"GOAMD64_{os.environ['GOAMD64']}"]) - - # Note: at this point go_asm.h is empty, but that's OK. As per the Go compiler: - # https://github.com/golang/go/blob/3f8f929d60a90c4e4e2b07c8d1972166c1a783b1/src/cmd/go/internal/work/gc.go#L441-L443 - symabis = args.output.with_suffix(".symabis") - _compile(assemble_prefix + ["-gensymabis"], symabis, s_files) - compile_prefix.extend(["-symabis", symabis]) - - if args.embedcfg is not None: - compile_prefix.extend( - [ - "-embedcfg", - args.embedcfg, - ] - ) - - # This will create go_asm.h - _compile(compile_prefix, args.output, go_files) - - else: - args.output.touch() - - # If there are assembly files, assemble them to an object and add into the - # output archive. - for s_file in s_files: - s_object = args.output.with_name(s_file.name).with_suffix(".o") - _compile(assemble_prefix, s_object, [s_file]) - o_files.append(s_object) - - if o_files: - _pack(args.packer, args.output, o_files) - - -sys.exit(main(sys.argv)) diff --git a/prelude/go/tools/concat_files.py b/prelude/go/tools/concat_files.py deleted file mode 100644 index 145335a2885..00000000000 --- a/prelude/go/tools/concat_files.py +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under both the MIT license found in the -# LICENSE-MIT file in the root directory of this source tree and the Apache -# License, Version 2.0 found in the LICENSE-APACHE file in the root directory -# of this source tree. - -import argparse -import sys -from pathlib import Path - - -def main(argv): - parser = argparse.ArgumentParser(fromfile_prefix_chars="@") - parser.add_argument("--output", required=True, type=Path) - parser.add_argument("files", type=Path, nargs="*") - args = parser.parse_args(argv[1:]) - - if len(args.files) == 0: - print( - "usage: concat_files.py --output out.txt in1.txt in2.txt", file=sys.stderr - ) - return 1 - - with open(args.output, "wb") as outfile: - for f in args.files: - with open(f, "rb") as infile: - outfile.write(infile.read()) - - return 0 - - -sys.exit(main(sys.argv)) diff --git a/prelude/go/tools/cover_srcs.py b/prelude/go/tools/cover_srcs.py deleted file mode 100644 index 1dabf647ad9..00000000000 --- a/prelude/go/tools/cover_srcs.py +++ /dev/null @@ -1,82 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under both the MIT license found in the -# LICENSE-MIT file in the root directory of this source tree and the Apache -# License, Version 2.0 found in the LICENSE-APACHE file in the root directory -# of this source tree. - -""" -Run `go cover` on non-`_test.go` input sources. 
-""" - -# pyre-unsafe - -import argparse -import hashlib -import subprocess -import sys -from pathlib import Path - - -def _var(pkg_name, src): - return "Var_" + hashlib.md5(f"{pkg_name}::{src}".encode("utf-8")).hexdigest() - - -def main(argv): - parser = argparse.ArgumentParser(fromfile_prefix_chars="@") - parser.add_argument("--cover", type=Path, required=True) - parser.add_argument("--pkg-name", type=str, required=True) - parser.add_argument("--coverage-mode", type=str, required=True) - parser.add_argument("--covered-srcs-dir", type=Path, required=True) - parser.add_argument("--out-srcs-argsfile", type=Path, required=True) - parser.add_argument("--coverage-var-argsfile", type=Path, required=True) - parser.add_argument("srcs", nargs="*", type=Path) - args = parser.parse_args(argv[1:]) - - out_srcs = [] - coverage_vars = {} - - args.covered_srcs_dir.mkdir(parents=True) - - for src in args.srcs: - # don't cover test files or non-go files (e.g. assembly files) - if src.name.endswith("_test.go") or not src.name.endswith(".go"): - out_srcs.append(src) - else: - var = _var(args.pkg_name, src) - covered_src = args.covered_srcs_dir / src - covered_src.parent.mkdir(parents=True, exist_ok=True) - subprocess.check_call( - [ - args.cover, - "-mode", - args.coverage_mode, - "-var", - var, - "-o", - covered_src, - src, - ] - ) - # we need just the source name for the --cover-pkgs argument - coverage_vars[var] = src.name - out_srcs.append(covered_src) - - with open(args.out_srcs_argsfile, mode="w") as f: - for src in out_srcs: - print(src, file=f) - - with open(args.coverage_var_argsfile, mode="w") as f: - if coverage_vars: - print("--cover-pkgs", file=f) - print( - "{}:{}".format( - args.pkg_name, - ",".join([f"{var}={name}" for var, name in coverage_vars.items()]), - ), - file=f, - ) - - -sys.exit(main(sys.argv)) diff --git a/prelude/go/tools/filter_srcs.py b/prelude/go/tools/filter_srcs.py deleted file mode 100755 index a242e981da1..00000000000 --- a/prelude/go/tools/filter_srcs.py +++ /dev/null @@ -1,103 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under both the MIT license found in the -# LICENSE-MIT file in the root directory of this source tree and the Apache -# License, Version 2.0 found in the LICENSE-APACHE file in the root directory -# of this source tree. - -""" -Run on a directory of Go source files and print out a list of srcs that should -be compiled. - -Example: - - $ ./filter_srcs.py --output srcs.txt src/dir/ - -""" - -# pyre-unsafe - -import argparse -import json -import os -import subprocess -import sys -import tempfile -from pathlib import Path - - -def main(argv): - parser = argparse.ArgumentParser() - parser.add_argument("--go", default="go", type=Path) - parser.add_argument("--tests", action="store_true") - parser.add_argument("--tags", default="") - parser.add_argument("--output", type=argparse.FileType("w"), default=sys.stdout) - parser.add_argument("srcdir", type=Path) - args = parser.parse_args(argv[1:]) - - # Find all source sub-dirs, which we'll need to run `go list` from. - roots = set() - for root, _dirs, _files in os.walk(args.srcdir): - roots.add(root) - - # Compute absolute paths for GOROOT, to enable `go list` to use `compile/asm/etc` - goroot = os.environ.get("GOROOT", "") - if goroot: - goroot = os.path.realpath(goroot) - - # Run `go list` on all source dirs to filter input sources by build pragmas. 
- for root in roots: - with tempfile.TemporaryDirectory() as go_cache_dir: - out = subprocess.check_output( - [ - "env", - "-i", - "GOROOT={}".format(goroot), - "GOARCH={}".format(os.environ.get("GOARCH", "")), - "GOOS={}".format(os.environ.get("GOOS", "")), - "CGO_ENABLED={}".format(os.environ.get("CGO_ENABLED", "0")), - "GO111MODULE=off", - "GOCACHE=" + go_cache_dir, - args.go.resolve(), - "list", - "-e", - "-json", - "-tags", - args.tags, - ".", - ], - cwd=root, - ).decode("utf-8") - - # Parse JSON output and print out sources. - idx = 0 - decoder = json.JSONDecoder() - while idx < len(out) - 1: - # The raw_decode method fails if there are any leading spaces, e.g. " {}" fails - # so manually trim the prefix of the string - if out[idx].isspace(): - idx += 1 - continue - - obj, idx = decoder.raw_decode(out, idx) - types = ["GoFiles", "EmbedFiles"] - if args.tests: - types.extend(["TestGoFiles", "XTestGoFiles"]) - else: - types.extend(["SFiles"]) - for typ in types: - for src in obj.get(typ, []): - src = Path(obj["Dir"]) / src - # Resolve the symlink - src = Path( - os.path.normpath(str(src.parent / os.readlink(str(src)))) - ) - # Relativize to the CWD. - src = src.relative_to(os.getcwd()) - print(src, file=args.output) - - args.output.close() - - -sys.exit(main(sys.argv)) diff --git a/prelude/go/tools/go_wrapper.py b/prelude/go/tools/go_wrapper.py index 83b3c9ef526..4a4ed0981d9 100644 --- a/prelude/go/tools/go_wrapper.py +++ b/prelude/go/tools/go_wrapper.py @@ -7,11 +7,46 @@ import argparse import os +import shlex import subprocess import sys from pathlib import Path +# A copy of "cmd/internal/quoted" translated into Python with GPT-4 +# Source: https://github.com/golang/go/blob/7e9894449e8a12157a28a4a14fc9341353a6469c/src/cmd/internal/quoted/quoted.go#L65 +def go_join(args): + buf = [] + for i, arg in enumerate(args): + if i > 0: + buf.append(" ") + saw_space, saw_single_quote, saw_double_quote = False, False, False + for c in arg: + if ord(c) > 127: + continue + elif c.isspace(): + saw_space = True + elif c == "'": + saw_single_quote = True + elif c == '"': + saw_double_quote = True + if not saw_space and not saw_single_quote and not saw_double_quote: + buf.append(arg) + elif not saw_single_quote: + buf.append("'") + buf.append(arg) + buf.append("'") + elif not saw_double_quote: + buf.append('"') + buf.append(arg) + buf.append('"') + else: + raise ValueError( + f"Argument {arg} contains both single and double quotes and cannot be quoted" + ) + return "".join(buf) + + def main(argv): """ This is a wrapper script around the `go` binary. 
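For intuition, a minimal usage sketch of `go_join` with illustrative values:

    print(go_join(["-O2", "-I/path with spaces/include"]))
    # prints: -O2 '-I/path with spaces/include'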
@@ -21,13 +56,11 @@ def main(argv): print("usage: go_wrapper.py ", file=sys.stderr) return 1 - wrapped_binary = Path(argv[1]) + wrapped_binary = Path(argv[1]).resolve() - parser = argparse.ArgumentParser() - parser.add_argument("--cc", action="append", default=[]) - parser.add_argument("--cgo_cflags", action="append", default=[]) - parser.add_argument("--cgo_cppflags", action="append", default=[]) - parser.add_argument("--cgo_ldflags", action="append", default=[]) + parser = argparse.ArgumentParser(fromfile_prefix_chars="@") + parser.add_argument("--workdir", type=Path, default=None) + parser.add_argument("--output", type=argparse.FileType("w"), default=sys.stdout) parsed, unknown = parser.parse_known_args(argv[2:]) env = os.environ.copy() @@ -38,25 +71,22 @@ def main(argv): env["GOCACHE"] = os.path.realpath(env["BUCK_SCRATCH_PATH"]) cwd = os.getcwd() - if len(parsed.cc) > 0: - env["CC"] = " ".join([arg.replace("%cwd%", cwd) for arg in parsed.cc]) - - if len(parsed.cgo_cflags) > 0: - env["CGO_CFLAGS"] = " ".join( - [arg.replace("%cwd%", cwd) for arg in parsed.cgo_cflags] - ) - - if len(parsed.cgo_cppflags) > 0: - env["CGO_CPPFLAGS"] = " ".join( - [arg.replace("%cwd%", cwd) for arg in parsed.cgo_cppflags] - ) + for env_var in ["CC", "CGO_CFLAGS", "CGO_CPPFLAGS", "CGO_LDFLAGS"]: + if env_var in env: + # HACK: Split the value into a list of arguments then join them back. + # This is because buck encodes quoted args in a way `go` doesn't like, + # but `go_join` does it in a way that `go` expects. + var_value = go_join(shlex.split(env[env_var])) + # HACK: Replace %cwd% with the current working directory to make it work when `go` does `cd` to a tmp-dir. + env[env_var] = var_value.replace("%cwd%", cwd) - if len(parsed.cgo_ldflags) > 0: - env["CGO_LDFLAGS"] = " ".join( - [arg.replace("%cwd%", cwd) for arg in parsed.cgo_ldflags] - ) + unknown = [arg.replace("%cwd%", cwd) for arg in unknown] - return subprocess.call([wrapped_binary] + unknown, env=env) + retcode = subprocess.call( + [wrapped_binary] + unknown, env=env, cwd=parsed.workdir, stdout=parsed.output + ) + parsed.output.close() + return retcode sys.exit(main(sys.argv)) diff --git a/prelude/go/transitions/defs.bzl b/prelude/go/transitions/defs.bzl index 1ad7f390c0e..639a4efe95e 100644 --- a/prelude/go/transitions/defs.bzl +++ b/prelude/go/transitions/defs.bzl @@ -12,13 +12,13 @@ def _cgo_enabled_transition(platform, refs, attrs): constraints = platform.configuration.constraints # Cancel transition if the value already set - # to enable using configuration modifiers for overiding this option - cgo_enabled_setting = refs.cgo_enabled_auto[ConstraintValueInfo].setting + # to enable using configuration modifiers for overriding this option + cgo_enabled_setting = refs.cgo_enabled_true[ConstraintValueInfo].setting if cgo_enabled_setting.label in constraints: return platform if attrs.cgo_enabled == None: - cgo_enabled_ref = refs.cgo_enabled_auto + return platform elif attrs.cgo_enabled == True: cgo_enabled_ref = refs.cgo_enabled_true else: @@ -37,10 +37,24 @@ def _cgo_enabled_transition(platform, refs, attrs): configuration = new_cfg, ) -def _compile_shared_transition(platform, refs, _): - compile_shared_value = refs.compile_shared_value[ConstraintValueInfo] +def _race_transition(platform, refs, attrs): constraints = platform.configuration.constraints - constraints[compile_shared_value.setting.label] = compile_shared_value + + # Cancel transition if the value already set + # to enable using configuration modifiers for overriding this option + 
race_setting = refs.race_false[ConstraintValueInfo].setting + if race_setting.label in constraints: + return platform + + # change configuration only when we can't avoid it + if attrs.race == True: + race_ref = refs.race_true + else: + return platform + + race_value = race_ref[ConstraintValueInfo] + constraints[race_value.setting.label] = race_value + new_cfg = ConfigurationInfo( constraints = constraints, values = platform.configuration.values, @@ -51,22 +65,23 @@ def _compile_shared_transition(platform, refs, _): configuration = new_cfg, ) -def _race_transition(platform, refs, attrs): +def _asan_transition(platform, refs, attrs): constraints = platform.configuration.constraints # Cancel transition if the value already set - # to enable using configuration modifiers for overiding this option - race_setting = refs.race_false[ConstraintValueInfo].setting - if race_setting.label in constraints: + # to enable using configuration modifiers for overriding this option + asan_setting = refs.asan_false[ConstraintValueInfo].setting + if asan_setting.label in constraints: return platform - if attrs.race == True: - race_ref = refs.race_true + # change configuration only when we can't avoid it + if attrs.asan == True: + asan_ref = refs.asan_true else: - race_ref = refs.race_false + return platform - race_value = race_ref[ConstraintValueInfo] - constraints[race_value.setting.label] = race_value + asan_value = asan_ref[ConstraintValueInfo] + constraints[asan_value.setting.label] = asan_value new_cfg = ConfigurationInfo( constraints = constraints, @@ -82,7 +97,7 @@ def _coverage_mode_transition(platform, refs, attrs): constraints = platform.configuration.constraints # Cancel transition if the value already set - # to enable using configuration modifiers for overiding this option + # to enable using configuration modifiers for overriding this option coverage_mode_setting = refs.coverage_mode_set[ConstraintValueInfo].setting if coverage_mode_setting.label in constraints: return platform @@ -113,10 +128,14 @@ def _coverage_mode_transition(platform, refs, attrs): def _tags_transition(platform, refs, attrs): constraints = platform.configuration.constraints + + if not attrs.tags: + return platform + for tag in attrs.tags: ref_name = "tag_{}__value".format(tag) if not hasattr(refs, ref_name): - fail("Add tag '{}' to .buckconfig attrubute `go.allowed_tags` to allow using it".format(tag)) + fail("Add tag '{}' to .buckconfig attribute `go.allowed_tags` to allow using it".format(tag)) tag_value = getattr(refs, ref_name)[ConstraintValueInfo] constraints[tag_value.setting.label] = tag_value @@ -131,6 +150,31 @@ def _tags_transition(platform, refs, attrs): configuration = new_cfg, ) +def _force_mingw_on_windows(platform, refs, _): + constraints = platform.configuration.constraints + + abi_gnu_value = refs.abi_gnu[ConstraintValueInfo] + if abi_gnu_value.setting.label in constraints and constraints[abi_gnu_value.setting.label] == abi_gnu_value: + # Already MinGW/GNU, do nothing + return platform + + os_windows_value = refs.os_windows[ConstraintValueInfo] + if os_windows_value.setting.label in constraints and constraints[os_windows_value.setting.label] != os_windows_value: + # Non-Windows, do nothing + return platform + + constraints[abi_gnu_value.setting.label] = abi_gnu_value + + new_cfg = ConfigurationInfo( + constraints = constraints, + values = platform.configuration.values, + ) + + return PlatformInfo( + label = platform.label, + configuration = new_cfg, + ) + def _chain_transitions(transitions): def tr(platform, 
refs, attrs): for t in transitions: @@ -139,10 +183,17 @@ def _chain_transitions(transitions): return tr -_tansitions = [_cgo_enabled_transition, _compile_shared_transition, _race_transition, _tags_transition] +_all_level_tansitions = [_force_mingw_on_windows] +_top_level_tansitions = [_asan_transition, _cgo_enabled_transition, _race_transition, _tags_transition] + _all_level_tansitions -_refs = { - "cgo_enabled_auto": "prelude//go/constraints:cgo_enabled_auto", +_all_level_refs = { + "abi_gnu": "prelude//abi/constraints:gnu", + "os_windows": "prelude//os/constraints:windows", +} + +_top_level_refs = { + "asan_false": "prelude//go/constraints:asan_false", + "asan_true": "prelude//go/constraints:asan_true", "cgo_enabled_false": "prelude//go/constraints:cgo_enabled_false", "cgo_enabled_true": "prelude//go/constraints:cgo_enabled_true", "race_false": "prelude//go/constraints:race_false", @@ -150,22 +201,19 @@ _refs = { } | { "tag_{}__value".format(tag): constrant_value for tag, constrant_value in tag_to_constrant_value().items() -} +} | _all_level_refs -_attrs = ["cgo_enabled", "race", "tags"] +_attrs = ["asan", "cgo_enabled", "race", "tags"] go_binary_transition = transition( - impl = _chain_transitions(_tansitions), - refs = _refs | { - "compile_shared_value": "prelude//go/constraints:compile_shared_false", - }, + impl = _chain_transitions(_top_level_tansitions), + refs = _top_level_refs, attrs = _attrs, ) go_test_transition = transition( - impl = _chain_transitions(_tansitions + [_coverage_mode_transition]), - refs = _refs | { - "compile_shared_value": "prelude//go/constraints:compile_shared_false", + impl = _chain_transitions(_top_level_tansitions + [_coverage_mode_transition]), + refs = _top_level_refs | { "coverage_mode_atomic": "prelude//go/constraints:coverage_mode_atomic", "coverage_mode_count": "prelude//go/constraints:coverage_mode_count", "coverage_mode_set": "prelude//go/constraints:coverage_mode_set", @@ -174,32 +222,41 @@ go_test_transition = transition( ) go_exported_library_transition = transition( - impl = _chain_transitions(_tansitions), - refs = _refs | { - "compile_shared_value": "prelude//go/constraints:compile_shared_true", - }, + impl = _chain_transitions(_top_level_tansitions), + refs = _top_level_refs, attrs = _attrs, ) +go_library_transition = transition( + impl = _chain_transitions(_all_level_tansitions), + refs = _all_level_refs, + attrs = [], +) + +go_stdlib_transition = transition( + impl = _chain_transitions(_all_level_tansitions), + refs = _all_level_refs, + attrs = [], +) + cgo_enabled_attr = attrs.default_only(attrs.option(attrs.bool(), default = select({ "DEFAULT": None, - "prelude//go/constraints:cgo_enabled_auto": None, "prelude//go/constraints:cgo_enabled_false": False, "prelude//go/constraints:cgo_enabled_true": True, }))) -compile_shared_attr = attrs.default_only(attrs.bool(default = select({ - "DEFAULT": False, - "prelude//go/constraints:compile_shared_false": False, - "prelude//go/constraints:compile_shared_true": True, -}))) - race_attr = attrs.default_only(attrs.bool(default = select({ "DEFAULT": False, "prelude//go/constraints:race_false": False, "prelude//go/constraints:race_true": True, }))) +asan_attr = attrs.default_only(attrs.bool(default = select({ + "DEFAULT": False, + "prelude//go/constraints:asan_false": False, + "prelude//go/constraints:asan_true": True, +}))) + coverage_mode_attr = attrs.default_only(attrs.option(attrs.enum(GoCoverageMode.values()), default = select({ "DEFAULT": None, "prelude//go/constraints:coverage_mode_atomic": 
"atomic", diff --git a/prelude/go/transitions/tags_helper.bzl b/prelude/go/transitions/tags_helper.bzl index 966fbb69d80..1acb328e411 100644 --- a/prelude/go/transitions/tags_helper.bzl +++ b/prelude/go/transitions/tags_helper.bzl @@ -10,14 +10,14 @@ load("@prelude//utils:buckconfig.bzl", "read_list") allowed_tags = read_list("go", "allowed_tags", default = [], root_cell = True) def tag_to_constrant_value(): - return {tag: "prelude//go/constraints:tag_{}__value".format(tag) for tag in allowed_tags} + return {tag: "prelude//go/tags/constraints:{}".format(tag) for tag in allowed_tags} def selects_for_tags(): selects = [] for tag in allowed_tags: selects += select({ "DEFAULT": [], - "prelude//go/constraints:tag_{}__value".format(tag): [tag], + "prelude//go/tags/constraints:{}".format(tag): [tag], }) return selects diff --git a/prelude/go_bootstrap/go_bootstrap.bzl b/prelude/go_bootstrap/go_bootstrap.bzl new file mode 100644 index 00000000000..48ec3cefb80 --- /dev/null +++ b/prelude/go_bootstrap/go_bootstrap.bzl @@ -0,0 +1,63 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load("@prelude//:paths.bzl", "paths") + +GoBootstrapToolchainInfo = provider( + fields = { + "env_go_arch": provider_field(str), + "env_go_os": provider_field(str), + "env_go_root": provider_field(typing.Any, default = None), + "go": provider_field(RunInfo), + "go_wrapper": provider_field(RunInfo), + }, +) + +def go_bootstrap_binary_impl(ctx: AnalysisContext) -> list[Provider]: + """ + Produces a Go binary for use in prelude. Similar to `python_bootstrap_binary` + It doesn't depend on other Go rules and uses `go build` under the hood. + CGo is disabled minimise dependencies. 
+ """ + go_toolchain = ctx.attrs._go_bootstrap_toolchain[GoBootstrapToolchainInfo] + + target_is_win = go_toolchain.env_go_os == "windows" + exe_suffix = ".exe" if target_is_win else "" + output = ctx.actions.declare_output(ctx.label.name + exe_suffix) + + # Copy files, because go:embed doesn't work with symlinks + srcs_dir = ctx.actions.copied_dir( + "__srcs_dir__", + {paths.relativize(src.short_path, ctx.attrs.workdir): src for src in ctx.attrs.srcs}, + ) + + cmd = cmd_args([ + go_toolchain.go_wrapper, + go_toolchain.go, + ["--workdir", srcs_dir], + "build", + ["-o", cmd_args(output.as_output(), relative_to = srcs_dir)], + ctx.attrs.entrypoints, + ]) + + env = { + "CGO_ENABLED": "0", + "GO111MODULE": "", + "GOARCH": go_toolchain.env_go_arch, + "GOOS": go_toolchain.env_go_os, + "GOTOOLCHAIN": "local", + } + + if go_toolchain.env_go_root != None: + env["GOROOT"] = go_toolchain.env_go_root + + ctx.actions.run(cmd, env = env, category = "go_bootstrap_binary") + + return [ + DefaultInfo(default_output = output), + RunInfo(args = [output]), + ] diff --git a/prelude/go_bootstrap/tools/BUCK.v2 b/prelude/go_bootstrap/tools/BUCK.v2 new file mode 100644 index 00000000000..0312ccee3a0 --- /dev/null +++ b/prelude/go_bootstrap/tools/BUCK.v2 @@ -0,0 +1,36 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + +prelude = native + +prelude.go_bootstrap_binary( + name = "go_concat_files", + srcs = prelude.glob(["**/*"]), + entrypoints = [ + "go/concat_files.go", + ], + visibility = ["PUBLIC"], +) + +prelude.go_bootstrap_binary( + name = "go_go_wrapper", + srcs = prelude.glob(["**/*"]), + entrypoints = [ + "go/go_wrapper.go", + ], + visibility = ["PUBLIC"], +) + +prelude.go_bootstrap_binary( + name = "go_testmaingen", + srcs = prelude.glob(["**/*"]), + entrypoints = [ + "go/testmaingen.go", + ], + visibility = [ + "PUBLIC", + ], +) diff --git a/prelude/go_bootstrap/tools/README.md b/prelude/go_bootstrap/tools/README.md new file mode 100644 index 00000000000..c3dab4b0764 --- /dev/null +++ b/prelude/go_bootstrap/tools/README.md @@ -0,0 +1,8 @@ +# How to work with third-party deps? + +Use [`go` tool](https://go.dev/doc/modules/managing-dependencies) for that + +1. Add/Remove a dependency in your code +1. `cd buck2/prelude/go_bootstrap/tools` +1. `go mod tidy` - to resolve deps +1. `go mod vendor` - to save deps in the repo diff --git a/prelude/go_bootstrap/tools/go.mod b/prelude/go_bootstrap/tools/go.mod new file mode 100644 index 00000000000..6a72b589445 --- /dev/null +++ b/prelude/go_bootstrap/tools/go.mod @@ -0,0 +1,3 @@ +module tools + +go 1.22.5 diff --git a/prelude/go_bootstrap/tools/go/concat_files.go b/prelude/go_bootstrap/tools/go/concat_files.go new file mode 100644 index 00000000000..62190cad40a --- /dev/null +++ b/prelude/go_bootstrap/tools/go/concat_files.go @@ -0,0 +1,46 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under both the MIT license found in the + * LICENSE-MIT file in the root directory of this source tree and the Apache + * License, Version 2.0 found in the LICENSE-APACHE file in the root directory + * of this source tree. 
+ */ + +package main + +import ( + "flag" + "io" + "log" + "os" +) + +func main() { + var outputFile = flag.String("output", "", "path of the output file") + flag.Parse() + inputFiles := flag.Args() + + if *outputFile == "" || len(inputFiles) < 2 { + log.Fatal("usage: concat_files.go --output out.txt in1.txt in2.txt") + } + + f, err := os.Create(*outputFile) + if err != nil { + log.Fatalf("Error creating output file: %v", err) + } + defer f.Close() + + for _, file := range inputFiles { + infile, err := os.Open(file) + if err != nil { + log.Fatalf("Error opening input file %s: %v", file, err) + } + defer infile.Close() + + _, err = io.Copy(f, infile) + if err != nil { + log.Fatalf("Error copying file %s: %v", file, err) + } + } +} diff --git a/prelude/go_bootstrap/tools/go/go_wrapper.go b/prelude/go_bootstrap/tools/go/go_wrapper.go new file mode 100644 index 00000000000..715fa4f67d6 --- /dev/null +++ b/prelude/go_bootstrap/tools/go/go_wrapper.go @@ -0,0 +1,236 @@ +/* + * Copyright (c) Meta Platforms, Inc. and affiliates. + * + * This source code is licensed under both the MIT license found in the + * LICENSE-MIT file in the root directory of this source tree and the Apache + * License, Version 2.0 found in the LICENSE-APACHE file in the root directory + * of this source tree. + */ + +package main + +import ( + "bufio" + "flag" + "fmt" + "log" + "os" + "os/exec" + "path/filepath" + "strings" + "unicode" +) + +// GPT-4 implementation of Python's shlex.split +func shellSplit(input string) ([]string, error) { + var result []string + reader := strings.NewReader(input) + scanner := bufio.NewScanner(reader) + scanner.Split(bufio.ScanWords) + var token strings.Builder + inQuotes := false + var quoteChar rune + appendToken := func() { + if token.Len() > 0 { + result = append(result, token.String()) + token.Reset() + } + } + for scanner.Scan() { + word := scanner.Text() + for _, r := range word { + switch { + case r == '\'' || r == '"': + if inQuotes { + if r == quoteChar { + inQuotes = false + quoteChar = 0 + appendToken() + continue + } + } else { + inQuotes = true + quoteChar = r + continue + } + case unicode.IsSpace(r): + if !inQuotes { + appendToken() + continue + } + } + token.WriteRune(r) + } + if !inQuotes { + appendToken() + } + } + if inQuotes { + return nil, fmt.Errorf("unclosed quote in input: %s", input) + } + if err := scanner.Err(); err != nil { + return nil, err + } + return result, nil +} + +// A copy of https://github.com/golang/go/blob/go1.23.0/src/cmd/internal/quoted/quoted.go#L65 +func join(args []string) (string, error) { + var buf []byte + for i, arg := range args { + if i > 0 { + buf = append(buf, ' ') + } + var sawSpace, sawSingleQuote, sawDoubleQuote bool + for _, c := range arg { + switch { + case c > unicode.MaxASCII: + continue + case isSpaceByte(byte(c)): + sawSpace = true + case c == '\'': + sawSingleQuote = true + case c == '"': + sawDoubleQuote = true + } + } + switch { + case !sawSpace && !sawSingleQuote && !sawDoubleQuote: + buf = append(buf, arg...) + + case !sawSingleQuote: + buf = append(buf, '\'') + buf = append(buf, arg...) + buf = append(buf, '\'') + + case !sawDoubleQuote: + buf = append(buf, '"') + buf = append(buf, arg...)
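+ // close the surrounding double quotes (the argument contains single quotes, so double-quoting is used)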
+ buf = append(buf, '"') + + default: + return "", fmt.Errorf("argument %q contains both single and double quotes and cannot be quoted", arg) + } + } + return string(buf), nil +} + +// A copy of https://github.com/golang/go/blob/go1.23.0/src/cmd/internal/quoted/quoted.go#L15 +func isSpaceByte(c byte) bool { + return c == ' ' || c == '\t' || c == '\n' || c == '\r' +} + +func loadArgs(args []string) []string { + newArgs := make([]string, 0, len(args)) + for _, arg := range args { + if !strings.HasPrefix(arg, "@") { + newArgs = append(newArgs, arg) + } else { + file, err := os.Open(arg[1:]) + if err != nil { + log.Fatalf("Failed to open argsfile: %v", err) + } + defer file.Close() + scanner := bufio.NewScanner(file) + for scanner.Scan() { + newArgs = append(newArgs, scanner.Text()) + } + } + } + return newArgs +} + +func main() { + os.Args = loadArgs(os.Args) + var wrappedBinary = flag.String("go", "", "wrapped go binary") + var outputFile = flag.String("output", "", "file to redirect stdout to") + var workdir = flag.String("workdir", "", "directory to run the command in") + flag.Parse() + unknownArgs := flag.Args() + + if *wrappedBinary == "" { + log.Fatal("No wrapped binary specified") + } + + absWrappedBinary, err := filepath.Abs(*wrappedBinary) + if err != nil { + log.Fatalf("Failed to resolve wrapped binary: %s", err) + } + + envs := make(map[string]string) + for _, e := range os.Environ() { + pair := strings.SplitN(e, "=", 2) + envs[pair[0]] = pair[1] + } + + if goroot, ok := envs["GOROOT"]; ok { + absGoroot, err := filepath.Abs(goroot) + if err != nil { + log.Fatalf("Failed to resolve GOROOT: %s", err) + } + envs["GOROOT"] = absGoroot + } + + if buckScratchPath, ok := envs["BUCK_SCRATCH_PATH"]; ok { + absBuckScratchPath, err := filepath.Abs(buckScratchPath) + if err != nil { + log.Fatalf("Failed to resolve BUCK_SCRATCH_PATH: %s", err) + } + envs["GOCACHE"] = absBuckScratchPath + } + + cwd, err := os.Getwd() + if err != nil { + log.Fatalf("Failed to get current working directory: %s", err) + } + + for _, envVar := range []string{"CC", "CGO_CFLAGS", "CGO_CPPFLAGS", "CGO_LDFLAGS"} { + if value, ok := envs[envVar]; ok { + // HACK: Split the value into a list of arguments then join them back. + // This is because buck encodes quoted args in a way `go` doesn't like, + // but `join` does it in a way that `go` expects. + splitValue := strings.Split(value, "\t") + joinedValue, err := join(splitValue) + if err != nil { + log.Fatalf("Failed to join %q: %s", envVar, err) + } + // HACK: Replace %cwd% with the current working directory to make it work when `go` does `cd` to a tmp-dir. + envs[envVar] = strings.ReplaceAll(joinedValue, "%cwd%", cwd) + } + } + for i, arg := range unknownArgs { + unknownArgs[i] = strings.ReplaceAll(arg, "%cwd%", cwd) + } + + var output *os.File + if *outputFile == "" { + output = os.Stdout + } else { + output, err = os.Create(*outputFile) + if err != nil { + log.Fatalf("Error creating output file: %s", err) + } + defer output.Close() + } + + cmd := exec.Command(absWrappedBinary, unknownArgs...)
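+ // Flatten the possibly-rewritten environment map back into KEY=VALUE entries for the child process.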
+ + cmd.Env = make([]string, 0, len(envs)) + for k, v := range envs { + cmd.Env = append(cmd.Env, k+"="+v) + } + + if *workdir != "" { + cmd.Dir = *workdir + } + + cmd.Stdout = output + cmd.Stderr = os.Stderr + err = cmd.Run() + if err != nil { + exitCode := 1 + if exitErr, ok := err.(*exec.ExitError); ok { + exitCode = exitErr.ExitCode() + } + fmt.Fprintln(os.Stderr, "Error running command:", err) + os.Exit(exitCode) + } +} diff --git a/prelude/go/tools/testmaingen.go b/prelude/go_bootstrap/tools/go/testmaingen.go similarity index 100% rename from prelude/go/tools/testmaingen.go rename to prelude/go_bootstrap/tools/go/testmaingen.go diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 3cfde48b421..222838398e0 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -12,11 +12,7 @@ load( ) load( "@prelude//haskell:library_info.bzl", - "HaskellLibraryInfo", -) -load( - "@prelude//haskell:link_info.bzl", - "merge_haskell_link_infos", + "HaskellLibraryInfoTSet", ) load( "@prelude//haskell:toolchain.bzl", @@ -35,6 +31,7 @@ load( "@prelude//linking:link_info.bzl", "LinkStyle", ) +load("@prelude//utils:argfile.bzl", "at_argfile") # The type of the return value of the `_compile()` function. CompileResultInfo = record( @@ -54,7 +51,7 @@ CompileArgsInfo = record( PackagesInfo = record( exposed_package_args = cmd_args, packagedb_args = cmd_args, - transitive_deps = field(list[HaskellLibraryInfo]), + transitive_deps = field(HaskellLibraryInfoTSet), ) def _package_flag(toolchain: HaskellToolchainInfo) -> str: @@ -71,39 +68,40 @@ def get_packages_info( haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] # Collect library dependencies. Note that these don't need to be in a - # particular order and we really want to remove duplicates (there - # are a *lot* of duplicates). - libs = {} + # particular order. direct_deps_link_info = attr_deps_haskell_link_infos(ctx) - merged_hs_link_info = merge_haskell_link_infos(direct_deps_link_info) - - hs_link_info = merged_hs_link_info.prof_info if enable_profiling else merged_hs_link_info.info - - for lib in hs_link_info[link_style]: - libs[lib.db] = lib # lib.db is a good enough unique key + libs = ctx.actions.tset( + HaskellLibraryInfoTSet, + children = [ + lib.prof_info[link_style] if enable_profiling else lib.info[link_style] + for lib in direct_deps_link_info + ], + ) # base is special and gets exposed by default package_flag = _package_flag(haskell_toolchain) exposed_package_args = cmd_args([package_flag, "base"]) packagedb_args = cmd_args() + packagedb_set = {} - for lib in libs.values(): - exposed_package_args.hidden(lib.import_dirs.values()) - exposed_package_args.hidden(lib.stub_dirs) + for lib in libs.traverse(): + packagedb_set[lib.db] = None + hidden_args = cmd_args(hidden = [ + lib.import_dirs.values(), + lib.stub_dirs, + # libs of dependencies might be needed at compile time if + # we're using Template Haskell: + lib.libs, + ]) - # libs of dependencies might be needed at compile time if - # we're using Template Haskell: - exposed_package_args.hidden(lib.libs) + exposed_package_args.add(hidden_args) - packagedb_args.hidden(lib.import_dirs.values()) - packagedb_args.hidden(lib.stub_dirs) - packagedb_args.hidden(lib.libs) + packagedb_args.add(hidden_args) - for lib in libs.values(): - # These we need to add for all the packages/dependencies, i.e. - # direct and transitive (e.g.
`fbcode-common-hs-util-hs-array`) - packagedb_args.add("-package-db", lib.db) + # These we need to add for all the packages/dependencies, i.e. + # direct and transitive (e.g. `fbcode-common-hs-util-hs-array`) + packagedb_args.add([cmd_args("-package-db", x) for x in packagedb_set]) haskell_direct_deps_lib_infos = attr_deps_haskell_lib_infos( ctx, @@ -122,7 +120,7 @@ def get_packages_info( return PackagesInfo( exposed_package_args = exposed_package_args, packagedb_args = packagedb_args, - transitive_deps = libs.values(), + transitive_deps = libs, ) def compile_args( @@ -198,14 +196,19 @@ def compile_args( if pkgname: compile_args.add(["-this-unit-id", pkgname]) - srcs = cmd_args() + arg_srcs = [] + hidden_srcs = [] for (path, src) in srcs_to_pairs(ctx.attrs.srcs): # hs-boot files aren't expected to be an argument to compiler but does need # to be included in the directory of the associated src file if is_haskell_src(path): - srcs.add(src) + arg_srcs.append(src) else: - srcs.hidden(src) + hidden_srcs.append(src) + srcs = cmd_args( + arg_srcs, + hidden = hidden_srcs, + ) producing_indices = "-fwrite-ide-info" in ctx.attrs.compiler_flags @@ -238,13 +241,12 @@ def compile( if args.args_for_file: if haskell_toolchain.use_argsfile: - argsfile = ctx.actions.declare_output( - "haskell_compile_" + artifact_suffix + ".argsfile", - ) - for_file = cmd_args(args.args_for_file).add(args.srcs) - ctx.actions.write(argsfile.as_output(), for_file, allow_args = True) - compile_cmd.add(cmd_args(argsfile, format = "@{}")) - compile_cmd.hidden(for_file) + compile_cmd.add(at_argfile( + actions = ctx.actions, + name = artifact_suffix + ".haskell_compile_argsfile", + args = [args.args_for_file, args.srcs], + allow_args = True, + )) else: compile_cmd.add(args.args_for_file) compile_cmd.add(args.srcs) diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index e905bd9e376..320b8f9360b 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -20,8 +20,10 @@ load( load( "@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo", + "LinkerType", "PicBehavior", ) +load("@prelude//cxx:groups.bzl", "get_dedupped_roots_from_groups") load( "@prelude//cxx:link_groups.bzl", "LinkGroupContext", @@ -31,6 +33,7 @@ load( "get_filtered_links", "get_link_group_info", "get_link_group_preferred_linkage", + "get_public_link_group_nodes", "get_transitive_deps_matching_labels", "is_link_group_shlib", ) @@ -59,6 +62,7 @@ load( load( "@prelude//haskell:library_info.bzl", "HaskellLibraryInfo", + "HaskellLibraryInfoTSet", "HaskellLibraryProvider", ) load( @@ -67,7 +71,6 @@ load( "HaskellProfLinkInfo", "attr_link_style", "cxx_toolchain_link_style", - "merge_haskell_link_infos", ) load( "@prelude//haskell:toolchain.bzl", @@ -100,7 +103,6 @@ load( "LinkInfo", "LinkInfos", "LinkStyle", - "Linkage", "LinkedObject", "MergedLinkInfo", "SharedLibLinkable", @@ -129,13 +131,16 @@ load( "@prelude//linking:shared_libraries.bzl", "SharedLibraryInfo", "create_shared_libraries", + "create_shlib_symlink_tree", "merge_shared_libraries", "traverse_shared_library_info", ) +load("@prelude//linking:types.bzl", "Linkage") load( "@prelude//python:python.bzl", "PythonLibraryInfo", ) +load("@prelude//utils:argfile.bzl", "at_argfile") load("@prelude//utils:set.bzl", "set") load("@prelude//utils:utils.bzl", "filter_and_map_idx", "flatten") @@ -248,7 +253,7 @@ def haskell_prebuilt_library_impl(ctx: AnalysisContext) -> list[Provider]: def archive_linkable(lib): return ArchiveLinkable( archive = Archive(artifact = lib), - 
linker_type = "gnu", + linker_type = LinkerType("gnu"), ) def shared_linkable(lib): @@ -266,18 +271,28 @@ def haskell_prebuilt_library_impl(ctx: AnalysisContext) -> list[Provider]: ] hlibinfos[link_style] = hlibinfo - hlinkinfos[link_style] = [hlibinfo] + hlinkinfos[link_style] = ctx.actions.tset( + HaskellLibraryInfoTSet, + value = hlibinfo, + children = [lib.info[link_style] for lib in haskell_infos], + ) prof_hlibinfos[link_style] = prof_hlibinfo - prof_hlinkinfos[link_style] = [prof_hlibinfo] + prof_hlinkinfos[link_style] = ctx.actions.tset( + HaskellLibraryInfoTSet, + value = prof_hlibinfo, + children = [lib.prof_info[link_style] for lib in haskell_infos], + ) link_infos[link_style] = LinkInfos( default = LinkInfo( pre_flags = ctx.attrs.exported_linker_flags, + post_flags = ctx.attrs.exported_post_linker_flags, linkables = linkables, ), ) prof_link_infos[link_style] = LinkInfos( default = LinkInfo( pre_flags = ctx.attrs.exported_linker_flags, + post_flags = ctx.attrs.exported_post_linker_flags, linkables = prof_linkables, ), ) @@ -317,6 +332,7 @@ def haskell_prebuilt_library_impl(ctx: AnalysisContext) -> list[Provider]: solibs = {} for soname, lib in ctx.attrs.shared_libs.items(): solibs[soname] = LinkedObject(output = lib, unstripped_output = lib) + shared_libs = create_shared_libraries(ctx, solibs) linkable_graph = create_linkable_graph( ctx, @@ -326,7 +342,7 @@ def haskell_prebuilt_library_impl(ctx: AnalysisContext) -> list[Provider]: ctx = ctx, exported_deps = ctx.attrs.deps, link_infos = {_to_lib_output_style(s): v for s, v in link_infos.items()}, - shared_libs = solibs, + shared_libs = shared_libs, default_soname = None, ), ), @@ -335,7 +351,7 @@ def haskell_prebuilt_library_impl(ctx: AnalysisContext) -> list[Provider]: inherited_pp_info = cxx_inherited_preprocessor_infos(ctx.attrs.deps) own_pp_info = CPreprocessor( - relative_args = CPreprocessorArgs(args = flatten([["-isystem", d] for d in ctx.attrs.cxx_header_dirs])), + args = CPreprocessorArgs(args = flatten([["-isystem", d] for d in ctx.attrs.cxx_header_dirs])), ) return [ @@ -344,11 +360,11 @@ def haskell_prebuilt_library_impl(ctx: AnalysisContext) -> list[Provider]: cxx_merge_cpreprocessors(ctx, [own_pp_info], inherited_pp_info), merge_shared_libraries( ctx.actions, - create_shared_libraries(ctx, solibs), + shared_libs, shared_library_infos, ), merge_link_group_lib_info(deps = ctx.attrs.deps), - merge_haskell_link_infos(haskell_infos + [haskell_link_infos]), + haskell_link_infos, merged_link_info, HaskellProfLinkInfo( prof_infos = prof_merged_link_info, @@ -359,12 +375,12 @@ def haskell_prebuilt_library_impl(ctx: AnalysisContext) -> list[Provider]: def _srcs_to_objfiles( ctx: AnalysisContext, odir: Artifact, - osuf: str) -> cmd_args: - objfiles = cmd_args() + osuf: str) -> list[Artifact]: + objfiles = [] for src, _ in srcs_to_pairs(ctx.attrs.srcs): # Don't link boot sources, as they're only meant to be used for compiling. if is_haskell_src(src): - objfiles.add(cmd_args([odir, "/", paths.replace_extension(src, "." + osuf)], delimiter = "")) + objfiles.append(odir.project(paths.replace_extension(src, "." + osuf))) return objfiles _REGISTER_PACKAGE = """\ @@ -408,10 +424,6 @@ def _make_package( # Don't expose boot sources, as they're only meant to be used for compiling. 
modules = [src_to_module_name(x) for x, _ in srcs_to_pairs(ctx.attrs.srcs) if is_haskell_src(x)] - uniq_hlis = {} - for x in hlis: - uniq_hlis[x.id] = x - if enable_profiling: # Add the `-p` suffix otherwise ghc will look for objects # following this logic (https://fburl.com/code/3gmobm5x) and will fail. @@ -440,36 +452,40 @@ def _make_package( "import-dirs:" + ", ".join(import_dirs), "library-dirs:" + ", ".join(library_dirs), "extra-libraries: " + libname, - "depends: " + ", ".join(uniq_hlis), + "depends: " + ", ".join([lib.id for lib in hlis]), ] pkg_conf = ctx.actions.write("pkg-" + artifact_suffix + ".conf", conf) db = ctx.actions.declare_output("db-" + artifact_suffix) - db_deps = {} - for x in uniq_hlis.values(): - db_deps[repr(x.db)] = x.db + # While the list of hlis is unique, there may be multiple packages in the same db. + # Cutting down the GHC_PACKAGE_PATH significantly speeds up GHC. + db_deps = {x.db: None for x in hlis}.keys() # So that ghc-pkg can find the DBs for the dependencies. We might # be able to use flags for this instead, but this works. ghc_package_path = cmd_args( - db_deps.values(), + db_deps, delimiter = ":", ) haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] ctx.actions.run( - cmd_args([ - "sh", - "-c", - _REGISTER_PACKAGE, - "", - haskell_toolchain.packager, - db.as_output(), - pkg_conf, - ]).hidden(hi.values()).hidden(lib.values()), # needs hi, because ghc-pkg checks that the .hi files exist + cmd_args( + [ + "sh", + "-c", + _REGISTER_PACKAGE, + "", + haskell_toolchain.packager, + db.as_output(), + pkg_conf, + ], + # needs hi, because ghc-pkg checks that the .hi files exist + hidden = hi.values() + lib.values(), + ), category = "haskell_package_" + artifact_suffix.replace("-", "_"), - env = {"GHC_PACKAGE_PATH": ghc_package_path}, + env = {"GHC_PACKAGE_PATH": ghc_package_path} if db_deps else {}, ) return db @@ -482,10 +498,12 @@ HaskellLibBuildOutput = record( libs = list[Artifact], ) -def _get_haskell_shared_library_name_linker_flags(linker_type: str, soname: str) -> list[str]: - if linker_type == "gnu": +def _get_haskell_shared_library_name_linker_flags( + linker_type: LinkerType, + soname: str) -> list[str]: + if linker_type == LinkerType("gnu"): return ["-Wl,-soname,{}".format(soname)] - elif linker_type == "darwin": + elif linker_type == LinkerType("darwin"): # Passing `-install_name @rpath/...` or # `-Xlinker -install_name -Xlinker @rpath/...` instead causes # ghc-9.6.3: panic! 
(the 'impossible' happened) @@ -532,28 +550,31 @@ def _build_haskell_lib( lib_short_path = paths.join("lib-{}".format(artifact_suffix), libfile) linfos = [x.prof_info if enable_profiling else x.info for x in hlis] - uniq_infos = dedupe(flatten([x[link_style] for x in linfos])) + + # only gather direct dependencies + uniq_infos = [x[link_style].value for x in linfos] objfiles = _srcs_to_objfiles(ctx, compiled.objects, osuf) if link_style == LinkStyle("shared"): lib = ctx.actions.declare_output(lib_short_path) - link = cmd_args(haskell_toolchain.linker) - link.add(haskell_toolchain.linker_flags) - link.add(ctx.attrs.linker_flags) - link.add("-o", lib.as_output()) - link.add( - get_shared_library_flags(linker_info.type), - "-dynamic", - cmd_args( - _get_haskell_shared_library_name_linker_flags(linker_info.type, libfile), - prepend = "-optl", - ), + link = cmd_args( + [haskell_toolchain.linker] + + [haskell_toolchain.linker_flags] + + [ctx.attrs.linker_flags] + + ["-o", lib.as_output()] + + [ + get_shared_library_flags(linker_info.type), + "-dynamic", + cmd_args( + _get_haskell_shared_library_name_linker_flags(linker_info.type, libfile), + prepend = "-optl", + ), + ] + + [objfiles], + hidden = compiled.stubs, ) - link.add(objfiles) - link.hidden(compiled.stubs) - infos = get_link_args_for_strategy( ctx, nlis, @@ -574,7 +595,7 @@ def _build_haskell_lib( else: # static flavours # TODO: avoid making an archive for a single object, like cxx does # (but would that work with Template Haskell?) - archive = make_archive(ctx, lib_short_path, [compiled.objects], objfiles) + archive = make_archive(ctx, lib_short_path, objfiles) lib = archive.artifact libs = [lib] + archive.external_objects link_infos = LinkInfos( @@ -698,11 +719,19 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: if enable_profiling: prof_hlib_infos[link_style] = hlib - prof_hlink_infos[link_style] = [hlib] + prof_hlink_infos[link_style] = ctx.actions.tset( + HaskellLibraryInfoTSet, + value = hlib, + children = [li.prof_info[link_style] for li in hlis], + ) prof_link_infos[link_style] = hlib_build_out.link_infos else: hlib_infos[link_style] = hlib - hlink_infos[link_style] = [hlib] + hlink_infos[link_style] = ctx.actions.tset( + HaskellLibraryInfoTSet, + value = hlib, + children = [li.info[link_style] for li in hlis], + ) link_infos[link_style] = hlib_build_out.link_infos # Build the indices and create subtargets only once, with profiling @@ -729,6 +758,7 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: preferred_linkage, pic_behavior, ) + shared_libs = create_shared_libraries(ctx, solibs) # TODO(cjhopman): this haskell implementation does not consistently handle LibOutputStyle # and LinkStrategy as expected and it's hard to tell what the intent of the existing code is @@ -771,7 +801,7 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: preferred_linkage = preferred_linkage, exported_deps = ctx.attrs.deps, link_infos = {_to_lib_output_style(s): v for s, v in link_infos.items()}, - shared_libs = solibs, + shared_libs = shared_libs, # TODO(cjhopman): this should be set to non-None default_soname = None, ), @@ -804,10 +834,10 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: lib = hlib_infos, prof_lib = prof_hlib_infos, ), - merge_haskell_link_infos(hlis + [HaskellLinkInfo( + HaskellLinkInfo( info = hlink_infos, prof_info = prof_hlink_infos, - )]), + ), merged_link_info, HaskellProfLinkInfo( prof_infos = prof_merged_link_info, @@ -816,7 +846,7 @@ def 
haskell_library_impl(ctx: AnalysisContext) -> list[Provider]:
         cxx_merge_cpreprocessors(ctx, pp, inherited_pp_info),
         merge_shared_libraries(
             ctx.actions,
-            create_shared_libraries(ctx, solibs),
+            shared_libs,
             shared_library_infos,
         ),
         haskell_haddock_lib(ctx, pkgname),
@@ -861,7 +891,7 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]:
 def derive_indexing_tset(
         actions: AnalysisActions,
         link_style: LinkStyle,
-        value: [Artifact, None],
+        value: Artifact | None,
         children: list[Dependency]) -> HaskellIndexingTSet:
     index_children = []
     for dep in children:
@@ -899,17 +929,20 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]:
     haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo]
     output = ctx.actions.declare_output(ctx.attrs.name)

-    link = cmd_args(haskell_toolchain.compiler)
-    link.add("-o", output.as_output())
-    link.add(haskell_toolchain.linker_flags)
-    link.add(ctx.attrs.linker_flags)
+    link = cmd_args(
+        [haskell_toolchain.compiler] +
+        ["-o", output.as_output()] +
+        [haskell_toolchain.linker_flags] +
+        [ctx.attrs.linker_flags],
+        hidden = compiled.stubs,
+    )

-    link.hidden(compiled.stubs)
+    link_args = cmd_args()

     osuf, _hisuf = output_extensions(link_style, enable_profiling)
     objfiles = _srcs_to_objfiles(ctx, compiled.objects, osuf)
-    link.add(objfiles)
+    link_args.add(objfiles)

     indexing_tsets = {}
     if compiled.producing_indices:
@@ -926,8 +959,9 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]:
         deps = slis,
     )

-    sos = {}
+    sos = []
+    link_strategy = to_link_strategy(link_style)
     if link_group_info != None:
         own_binary_link_flags = []
         auto_link_groups = {}
@@ -940,14 +974,23 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]:
             # in the prelude, the link group map will give us the link group libs.
             # Otherwise, pull them from the `LinkGroupLibInfo` provider from our deps.
auto_link_group_specs = get_auto_link_group_specs(ctx, link_group_info) + executable_deps = [d.linkable_graph.nodes.value.label for d in link_deps if d.linkable_graph != None] + public_nodes = get_public_link_group_nodes( + linkable_graph_node_map, + link_group_info.mappings, + executable_deps, + None, + ) if auto_link_group_specs != None: linked_link_groups = create_link_groups( ctx = ctx, + link_strategy = link_strategy, link_group_mappings = link_group_info.mappings, link_group_preferred_linkage = link_group_preferred_linkage, - executable_deps = [d.linkable_graph.nodes.value.label for d in link_deps if d.linkable_graph != None], + executable_deps = executable_deps, link_group_specs = auto_link_group_specs, linkable_graph_node_map = linkable_graph_node_map, + public_nodes = public_nodes, ) for name, linked_link_group in linked_link_groups.libs.items(): auto_link_groups[name] = linked_link_group.artifact @@ -966,15 +1009,11 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: link_group_relevant_roots = find_relevant_roots( linkable_graph_node_map = linkable_graph_node_map, link_group_mappings = link_group_info.mappings, - roots = [ - mapping.root - for group in link_group_info.groups.values() - for mapping in group.mappings - if mapping.root != None - ], + roots = get_dedupped_roots_from_groups(link_group_info.groups.values()), ) - labels_to_links_map = get_filtered_labels_to_links_map( + labels_to_links = get_filtered_labels_to_links_map( + public_nodes = public_nodes, linkable_graph_node_map = linkable_graph_node_map, link_group = None, link_groups = link_group_info.groups, @@ -984,7 +1023,7 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: name: (lib.label, lib.shared_link_infos) for name, lib in link_group_libs.items() }, - link_strategy = to_link_strategy(link_style), + link_strategy = link_strategy, roots = ( [ d.linkable_graph.nodes.value.label @@ -1016,25 +1055,26 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: pre_flags = own_binary_link_flags, ), ) - link_infos.extend(get_filtered_links(labels_to_links_map, set(public_nodes))) + link_infos.extend(get_filtered_links(labels_to_links.map, set(public_nodes))) infos = LinkArgs(infos = link_infos) link_group_ctx = LinkGroupContext( link_group_mappings = link_group_info.mappings, link_group_libs = link_group_libs, link_group_preferred_linkage = link_group_preferred_linkage, - labels_to_links_map = labels_to_links_map, + labels_to_links_map = labels_to_links.map, + targets_consumed_by_link_groups = {}, ) - for name, shared_lib in traverse_shared_library_info(shlib_info).items(): + for shared_lib in traverse_shared_library_info(shlib_info): label = shared_lib.label if is_link_group_shlib(label, link_group_ctx): - sos[name] = shared_lib.lib + sos.append(shared_lib) # When there are no matches for a pattern based link group, # `link_group_mappings` will not have an entry associated with the lib. 
for _name, link_group_lib in link_group_libs.items(): - sos.update(link_group_lib.shared_libs) + sos.extend(link_group_lib.shared_libs.libraries) else: nlis = [] @@ -1047,23 +1087,32 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: li = lib.get(MergedLinkInfo) if li != None: nlis.append(li) - for name, shared_lib in traverse_shared_library_info(shlib_info).items(): - sos[name] = shared_lib.lib + sos.extend(traverse_shared_library_info(shlib_info)) infos = get_link_args_for_strategy(ctx, nlis, to_link_strategy(link_style)) - link.add(cmd_args(unpack_link_args(infos), prepend = "-optl")) + link_args.add(cmd_args(unpack_link_args(infos), prepend = "-optl")) + link.add(at_argfile( + actions = ctx.actions, + name = "args.haskell_link_argsfile", + args = link_args, + allow_args = True, + )) ctx.actions.run(link, category = "haskell_link") - run = cmd_args(output) - if link_style == LinkStyle("shared") or link_group_info != None: sos_dir = "__{}__shared_libs_symlink_tree".format(ctx.attrs.name) rpath_ref = get_rpath_origin(get_cxx_toolchain_info(ctx).linker_info.type) rpath_ldflag = "-Wl,{}/{}".format(rpath_ref, sos_dir) link.add("-optl", "-Wl,-rpath", "-optl", rpath_ldflag) - symlink_dir = ctx.actions.symlinked_dir(sos_dir, {n: o.output for n, o in sos.items()}) - run.hidden(symlink_dir) + symlink_dir = create_shlib_symlink_tree( + actions = ctx.actions, + out = sos_dir, + shared_libs = sos, + ) + run = cmd_args(output, hidden = symlink_dir) + else: + run = cmd_args(output) providers = [ DefaultInfo(default_output = output), diff --git a/prelude/haskell/haskell_ghci.bzl b/prelude/haskell/haskell_ghci.bzl index c15efdb14e9..77556601225 100644 --- a/prelude/haskell/haskell_ghci.bzl +++ b/prelude/haskell/haskell_ghci.bzl @@ -37,13 +37,13 @@ load( "LinkArgs", "LinkInfo", "LinkStyle", - "Linkage", "get_lib_output_style", "set_linkable_link_whole", "to_link_strategy", ) load( "@prelude//linking:linkable_graph.bzl", + "LinkableGraph", "LinkableRootInfo", "create_linkable_graph", "get_deps_for_link", @@ -52,12 +52,15 @@ load( load( "@prelude//linking:shared_libraries.bzl", "SharedLibraryInfo", + "create_shlib_symlink_tree", "traverse_shared_library_info", + "with_unique_str_sonames", ) +load("@prelude//linking:types.bzl", "Linkage") load( "@prelude//utils:graph_utils.bzl", - "breadth_first_traversal", - "breadth_first_traversal_by", + "depth_first_traversal", + "depth_first_traversal_by", ) load("@prelude//utils:utils.bzl", "flatten") @@ -177,11 +180,15 @@ def _build_haskell_omnibus_so(ctx: AnalysisContext) -> HaskellOmnibusData: for nlabel, n in graph_nodes.items() } - all_direct_deps = [dep.label for dep in all_deps] + all_direct_deps = [] + for dep in all_deps: + graph = dep.get(LinkableGraph) + if graph: + all_direct_deps.append(graph.label) dep_graph[ctx.label] = all_direct_deps # Need to exclude all transitive deps of excluded deps - all_nodes_to_exclude = breadth_first_traversal( + all_nodes_to_exclude = depth_first_traversal( dep_graph, [dep.label for dep in preload_deps], ) @@ -226,7 +233,7 @@ def _build_haskell_omnibus_so(ctx: AnalysisContext) -> HaskellOmnibusData: # This is not the final set of body nodes, because it still includes # nodes that don't support omnibus (e.g. 
haskell_library nodes) - breadth_first_traversal_by( + depth_first_traversal_by( dep_graph, [ctx.label], find_deps_for_body, @@ -269,7 +276,7 @@ def _build_haskell_omnibus_so(ctx: AnalysisContext) -> HaskellOmnibusData: # Handle third-party dependencies of the omnibus SO tp_deps_shared_link_infos = {} - so_symlinks = {} + prebuilt_shlibs = [] for node_label in prebuilt_so_deps.keys(): node = graph_nodes[node_label] @@ -283,14 +290,14 @@ def _build_haskell_omnibus_so(ctx: AnalysisContext) -> HaskellOmnibusData: shared_li = node.link_infos.get(output_style, None) if shared_li != None: tp_deps_shared_link_infos[node_label] = shared_li.default - for libname, linkObject in node.shared_libs.items(): - so_symlinks[libname] = linkObject.output + prebuilt_shlibs.extend(node.shared_libs.libraries) # Create symlinks to the TP dependencies' SOs so_symlinks_root_path = ctx.label.name + ".so-symlinks" - so_symlinks_root = ctx.actions.symlinked_dir( - so_symlinks_root_path, - so_symlinks, + so_symlinks_root = create_shlib_symlink_tree( + actions = ctx.actions, + out = so_symlinks_root_path, + shared_libs = prebuilt_shlibs, ) linker_info = get_cxx_toolchain_info(ctx).linker_info @@ -328,10 +335,10 @@ def _replace_macros_in_script_template( script_template: Artifact, haskell_toolchain: HaskellToolchainInfo, # Optional artifacts - ghci_bin: [Artifact, None] = None, - start_ghci: [Artifact, None] = None, - iserv_script: [Artifact, None] = None, - squashed_so: [Artifact, None] = None, + ghci_bin: Artifact | None = None, + start_ghci: Artifact | None = None, + iserv_script: Artifact | None = None, + squashed_so: Artifact | None = None, # Optional cmd_args exposed_package_args: [cmd_args, None] = None, packagedb_args: [cmd_args, None] = None, @@ -340,16 +347,16 @@ def _replace_macros_in_script_template( # Optional string args srcs: [str, None] = None, output_name: [str, None] = None, - ghci_iserv_path: [str, None] = None, + ghci_iserv_path: [Artifact, None] = None, preload_libs: [str, None] = None) -> Artifact: toolchain_paths = { BINUTILS_PATH: haskell_toolchain.ghci_binutils_path, - GHCI_LIB_PATH: haskell_toolchain.ghci_lib_path, + GHCI_LIB_PATH: haskell_toolchain.ghci_lib_path.get(DefaultInfo).default_outputs[0], CC_PATH: haskell_toolchain.ghci_cc_path, CPP_PATH: haskell_toolchain.ghci_cpp_path, CXX_PATH: haskell_toolchain.ghci_cxx_path, - GHCI_PACKAGER: haskell_toolchain.ghci_packager, - GHCI_GHC_PATH: haskell_toolchain.ghci_ghc_path, + GHCI_PACKAGER: haskell_toolchain.ghci_packager.get(DefaultInfo).default_outputs[0], + GHCI_GHC_PATH: haskell_toolchain.ghci_ghc_path.get(DefaultInfo).default_outputs[0], } if ghci_bin != None: @@ -363,7 +370,7 @@ def _replace_macros_in_script_template( replace_cmd = cmd_args(script_template_processor) replace_cmd.add(cmd_args(script_template, format = "--script_template={}")) for name, path in toolchain_paths.items(): - replace_cmd.add(cmd_args("--{}={}".format(name, path))) + replace_cmd.add(cmd_args(path, format = "--{}={{}}".format(name))) replace_cmd.add(cmd_args( final_script.as_output(), @@ -460,7 +467,7 @@ def _write_iserv_script( script_template = ghci_iserv_template, output_name = iserv_script_name, haskell_toolchain = haskell_toolchain, - ghci_iserv_path = ghci_iserv_path, + ghci_iserv_path = ghci_iserv_path.get(DefaultInfo).default_outputs[0], preload_libs = preload_libs, ) return iserv_script @@ -477,10 +484,10 @@ def _build_preload_deps_root( if SharedLibraryInfo in preload_dep: slib_info = preload_dep[SharedLibraryInfo] - shlib = 
traverse_shared_library_info(slib_info).items() + shlib = traverse_shared_library_info(slib_info) - for shlib_name, shared_lib in shlib: - preload_symlinks[shlib_name] = shared_lib.lib.output + for soname, shared_lib in with_unique_str_sonames(shlib).items(): + preload_symlinks[soname] = shared_lib.lib.output # TODO(T150785851): build or get SO for direct preload_deps # TODO(T150785851): find out why the only SOs missing are the ones from @@ -635,9 +642,9 @@ def haskell_ghci_impl(ctx: AnalysisContext) -> list[Provider]: packagedb_args = cmd_args(delimiter = " ") prebuilt_packagedb_args_set = {} - for lib in packages_info.transitive_deps: + for lib in packages_info.transitive_deps.traverse(): if lib.is_prebuilt: - prebuilt_packagedb_args_set[lib.db] = lib.db + prebuilt_packagedb_args_set[lib.db] = None else: lib_symlinks_root = paths.join( package_symlinks_root, @@ -667,7 +674,7 @@ def haskell_ghci_impl(ctx: AnalysisContext) -> list[Provider]: "packagedb", ), ) - prebuilt_packagedb_args = cmd_args(prebuilt_packagedb_args_set.values(), delimiter = " ") + prebuilt_packagedb_args = cmd_args(prebuilt_packagedb_args_set.keys(), delimiter = " ") script_templates = [] for script_template in ctx.attrs.extra_script_templates: @@ -718,7 +725,7 @@ def haskell_ghci_impl(ctx: AnalysisContext) -> list[Provider]: "__{}__".format(ctx.label.name), output_artifacts, ) - run = cmd_args(final_ghci_script).hidden(outputs) + run = cmd_args(final_ghci_script, hidden = outputs) return [ DefaultInfo(default_outputs = [root_output_dir]), diff --git a/prelude/haskell/haskell_haddock.bzl b/prelude/haskell/haskell_haddock.bzl index 4d498df2a5f..4154e3aba4f 100644 --- a/prelude/haskell/haskell_haddock.bzl +++ b/prelude/haskell/haskell_haddock.bzl @@ -15,6 +15,7 @@ load( "@prelude//haskell:util.bzl", "attr_deps", ) +load("@prelude//utils:argfile.bzl", "at_argfile") HaskellHaddockInfo = provider( fields = { @@ -70,14 +71,13 @@ def haskell_haddock_lib(ctx: AnalysisContext, pkgname: str) -> Provider: if args.args_for_file: if haskell_toolchain.use_argsfile: - argsfile = ctx.actions.declare_output( - "haskell_haddock.argsfile", - ) ghcargs = cmd_args(args.args_for_file, format = "--optghc={}") - fileargs = cmd_args(ghcargs).add(args.srcs) - ctx.actions.write(argsfile.as_output(), fileargs, allow_args = True) - cmd.add(cmd_args(argsfile, format = "@{}")) - cmd.hidden(fileargs) + cmd.add(at_argfile( + actions = ctx.actions, + name = "args.haskell_haddock_argsfile", + args = [ghcargs, args.srcs], + allow_args = True, + )) else: cmd.add(args.args_for_file) @@ -101,7 +101,7 @@ def haskell_haddock_lib(ctx: AnalysisContext, pkgname: str) -> Provider: ) ctx.actions.run( - cmd_args(script).hidden(cmd), + cmd_args(script, hidden = cmd), category = "haskell_haddock", no_outputs_cleanup = True, ) @@ -152,7 +152,7 @@ def haskell_haddock_impl(ctx: AnalysisContext) -> list[Provider]: ) ctx.actions.run( - cmd_args(script).hidden(script_args), + cmd_args(script, hidden = script_args), category = "haskell_haddock", no_outputs_cleanup = True, ) diff --git a/prelude/haskell/ide/ide.bxl b/prelude/haskell/ide/ide.bxl index ea3cecc7018..754c703305e 100644 --- a/prelude/haskell/ide/ide.bxl +++ b/prelude/haskell/ide/ide.bxl @@ -27,7 +27,7 @@ load("@prelude//paths.bzl", "paths") # 1. Finding its owner target, if the input is a file # 2. Finding the target's "project", which involves a rdeps search # 3. Computing the project solution (flags, sources and dependencies) -# 4. Outputing the solution as JSON +# 4. 
Outputting the solution as JSON _HASKELL_BIN = "prelude//rules.bzl:haskell_binary" _HASKELL_IDE = "prelude//rules.bzl:haskell_ide" @@ -35,6 +35,8 @@ _HASKELL_LIB = "prelude//rules.bzl:haskell_library" linkStyle = LinkStyle("static") +configuration_modifiers = ["ovr_config//third-party/ghc/constraints:8.8.3"] + def _impl_target(ctx): target = ctx.cli_args.target project_universe = ctx.cli_args.project_universe @@ -95,7 +97,10 @@ def _find_project_and_solve(ctx, target, project_universe = []): def _find_target_in_universe(ctx, target, project_universe): for p in project_universe: - cfg_p = ctx.configured_targets(p) + cfg_p = ctx.configured_targets( + p, + modifiers = configuration_modifiers, + ) members = cfg_p.resolved_attrs_eager(ctx).include_projects for member in members: if target.label.raw_target() == member.label.raw_target(): @@ -124,7 +129,10 @@ def _solution_for_haskell_ide(ctx, target): results = [] deps = {} for dep in resolved_attrs.deps_query: - t = ctx.configured_targets(dep.label.raw_target()) + t = ctx.configured_targets( + dep.label.raw_target(), + modifiers = configuration_modifiers, + ) if (t.rule_type == _HASKELL_LIB or t.rule_type == _HASKELL_BIN): deps[dep.label] = t for lib in deps.values(): @@ -156,7 +164,7 @@ def _solution_for_haskell_lib(ctx, target, exclude): import_dirs = {} root = ctx.root() for key, item in resolved_attrs.srcs.items(): - # because BXL wont give you the path of an ensured artifact + # because BXL won't give you the path of an ensured artifact sp = get_path_without_materialization(item, ctx) (_, ext) = paths.split_extension(sp) diff = sp.removesuffix(paths.replace_extension(key, ext)) @@ -259,7 +267,7 @@ def _assembleSolution(ctx, linkStyle, result): for provider in result["haskell_deps"].values(): info = provider.info.get(linkStyle) if info != None: - for item in info: + for item in info.traverse(): if result["exclude_packages"].get(item.name) == None: hlis[item.name] = item for hli in hlis.values(): diff --git a/prelude/haskell/library_info.bzl b/prelude/haskell/library_info.bzl index 028496e7b52..3b048f13746 100644 --- a/prelude/haskell/library_info.bzl +++ b/prelude/haskell/library_info.bzl @@ -41,3 +41,5 @@ HaskellLibraryInfo = record( is_prebuilt = bool, profiling_enabled = bool, ) + +HaskellLibraryInfoTSet = transitive_set() diff --git a/prelude/haskell/link_info.bzl b/prelude/haskell/link_info.bzl index 8699a875e09..5cdc5f59265 100644 --- a/prelude/haskell/link_info.bzl +++ b/prelude/haskell/link_info.bzl @@ -9,6 +9,10 @@ load( "@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo", ) +load( + "@prelude//haskell:library_info.bzl", + "HaskellLibraryInfoTSet", +) load( "@prelude//linking:link_info.bzl", "LinkStyle", @@ -18,8 +22,8 @@ load( HaskellLinkInfo = provider( # Contains a list of HaskellLibraryInfo records. 
fields = { - "info": provider_field(typing.Any, default = None), # dict[LinkStyle, list[HaskellLibraryInfo]] # TODO use a tset - "prof_info": provider_field(typing.Any, default = None), # dict[LinkStyle, list[HaskellLibraryInfo]] # TODO use a tset + "info": provider_field(dict[LinkStyle, HaskellLibraryInfoTSet]), + "prof_info": provider_field(dict[LinkStyle, HaskellLibraryInfoTSet]), }, ) @@ -32,24 +36,6 @@ HaskellProfLinkInfo = provider( }, ) -def merge_haskell_link_infos(deps: list[HaskellLinkInfo]) -> HaskellLinkInfo: - merged = {} - prof_merged = {} - for link_style in LinkStyle: - children = [] - prof_children = [] - for dep in deps: - if link_style in dep.info: - children.extend(dep.info[link_style]) - - if link_style in dep.prof_info: - prof_children.extend(dep.prof_info[link_style]) - - merged[link_style] = dedupe(children) - prof_merged[link_style] = dedupe(prof_children) - - return HaskellLinkInfo(info = merged, prof_info = prof_merged) - def cxx_toolchain_link_style(ctx: AnalysisContext) -> LinkStyle: return ctx.attrs._cxx_toolchain[CxxToolchainInfo].linker_info.link_style diff --git a/prelude/haskell/tools/BUCK.v2 b/prelude/haskell/tools/BUCK.v2 index 48758abb9a3..3029719fcd6 100644 --- a/prelude/haskell/tools/BUCK.v2 +++ b/prelude/haskell/tools/BUCK.v2 @@ -1,3 +1,9 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + prelude = native prelude.python_bootstrap_binary( diff --git a/prelude/haskell/util.bzl b/prelude/haskell/util.bzl index 89545e7affa..80584cd3beb 100644 --- a/prelude/haskell/util.bzl +++ b/prelude/haskell/util.bzl @@ -65,23 +65,23 @@ def attr_deps(ctx: AnalysisContext) -> list[Dependency]: return ctx.attrs.deps + _by_platform(ctx, ctx.attrs.platform_deps) def attr_deps_haskell_link_infos(ctx: AnalysisContext) -> list[HaskellLinkInfo]: - return filter( + return dedupe(filter( None, [ d.get(HaskellLinkInfo) for d in attr_deps(ctx) + ctx.attrs.template_deps ], - ) + )) # DONT CALL THIS FUNCTION, you want attr_deps_haskell_link_infos instead def attr_deps_haskell_link_infos_sans_template_deps(ctx: AnalysisContext) -> list[HaskellLinkInfo]: - return filter( + return dedupe(filter( None, [ d.get(HaskellLinkInfo) for d in attr_deps(ctx) ], - ) + )) def attr_deps_haskell_lib_infos( ctx: AnalysisContext, @@ -98,13 +98,13 @@ def attr_deps_haskell_lib_infos( ] def attr_deps_merged_link_infos(ctx: AnalysisContext) -> list[MergedLinkInfo]: - return filter( + return dedupe(filter( None, [ d.get(MergedLinkInfo) for d in attr_deps(ctx) ], - ) + )) def attr_deps_profiling_link_infos(ctx: AnalysisContext) -> list[MergedLinkInfo]: return filter( diff --git a/prelude/http_archive/http_archive.bzl b/prelude/http_archive/http_archive.bzl index 599e271d6ed..cd04c269450 100644 --- a/prelude/http_archive/http_archive.bzl +++ b/prelude/http_archive/http_archive.bzl @@ -13,6 +13,7 @@ load(":exec_deps.bzl", "HttpArchiveExecDeps") # Flags to apply to decompress the various types of archives. 
_TAR_FLAGS = { "tar": [], + "tar.bz2": ["-j"], "tar.gz": ["-z"], "tar.xz": ["-J"], "tar.zst": ["--use-compress-program=unzstd"], @@ -194,14 +195,17 @@ def http_archive_impl(ctx: AnalysisContext) -> list[Provider]: [ cmd_args(script_output, format = mkdir), cmd_args(script_output, format = "cd {}"), - cmd_args([unarchive_cmd] + exclude_flags, delimiter = " ").relative_to(script_output), + cmd_args([unarchive_cmd] + exclude_flags, delimiter = " ", relative_to = script_output), ], is_executable = True, allow_args = True, ) ctx.actions.run( - cmd_args(interpreter + [script]).hidden(exclude_hidden + [archive, script_output.as_output()]), + cmd_args( + interpreter + [script], + hidden = exclude_hidden + [archive, script_output.as_output()], + ), category = "http_archive", prefer_local = prefer_local, ) diff --git a/prelude/http_archive/tools/BUCK.v2 b/prelude/http_archive/tools/BUCK.v2 index f08b7dcc79b..b91ae412d47 100644 --- a/prelude/http_archive/tools/BUCK.v2 +++ b/prelude/http_archive/tools/BUCK.v2 @@ -1,5 +1,10 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") load("@prelude//http_archive/exec_deps.bzl", "http_archive_exec_deps") +oncall("build_infra") + +source_listing() + prelude = native http_archive_exec_deps( diff --git a/prelude/ide_integrations/xcode.bzl b/prelude/ide_integrations/xcode.bzl index f8f2cda0d3a..3434ac117a4 100644 --- a/prelude/ide_integrations/xcode.bzl +++ b/prelude/ide_integrations/xcode.bzl @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +XCODE_ARGSFILES_SUB_TARGET = "xcode-argsfiles" + XCODE_DATA_SUB_TARGET = "xcode-data" _XCODE_DATA_FILE_NAME = "xcode_data.json" @@ -12,18 +14,51 @@ XcodeDataInfo = provider(fields = { "data": provider_field(typing.Any, default = None), # {str: _a} }) +XcodeDataInfoKeys = struct( + ARCH = "arch", + ARGSFILES_BY_EXT = "argsfiles_by_ext", + BUNDLE_TYPE = "bundle_type", + CONTAINS_SWIFT_SOURCES = "contains_swift_sources", + DEFAULT_TARGET_PLATFORM = "default_target_platform", + DEPLOYMENT_VERSION = "deployment_version", + EXPORTED_HEADERS = "exported_headers", + EXTRA_XCODE_FILES = "extra_xcode_files", + HEADERS = "headers", + INFO_PLIST = "info_plist", + OUTPUT = "output", + PROCESSED_INFO_PLIST = "processed_info_plist", + INFO_PLIST_RELATIVE_PATH = "info_plist_relative_path", + PRODUCT_NAME = "product_name", + RULE_TYPE = "rule_type", + SDK = "sdk", + SRCS = "srcs", + SWIFT_VERSION = "swift_version", + TARGET = "target", + TEST_HOST_APP_BINARY = "test_host_app_binary", + TEST_TARGET = "test_target", + TEST_TYPE = "test_type", + XCTOOLCHAIN_BUNDLE_ID_TARGET = "xctoolchain_bundle_id_target", + XCTOOLCHAIN_BUNDLE_ID = "xctoolchain_bundle_id", + XCTOOLCHAIN_BUNDLE_TARGET = "xctoolchain_bundle_target", +) + def generate_xcode_data( ctx: AnalysisContext, rule_type: str, - output: [Artifact, None], + output: Artifact | None, populate_rule_specific_attributes_func: [typing.Callable, None] = None, **kwargs) -> (list[DefaultInfo], XcodeDataInfo): data = { - "rule_type": rule_type, - "target": ctx.label, + XcodeDataInfoKeys.RULE_TYPE: rule_type, + XcodeDataInfoKeys.TARGET: ctx.label, } if output: - data["output"] = output + data[XcodeDataInfoKeys.OUTPUT] = output + + data[XcodeDataInfoKeys.EXTRA_XCODE_FILES] = [] + if hasattr(ctx.attrs, "extra_xcode_files"): + data[XcodeDataInfoKeys.EXTRA_XCODE_FILES] = ctx.attrs.extra_xcode_files + if populate_rule_specific_attributes_func: data.update(populate_rule_specific_attributes_func(ctx, **kwargs)) diff --git 
a/prelude/java/class_to_srcs.bzl b/prelude/java/class_to_srcs.bzl index d253cab69c6..b026964ae63 100644 --- a/prelude/java/class_to_srcs.bzl +++ b/prelude/java/class_to_srcs.bzl @@ -7,11 +7,11 @@ load( "@prelude//java:java_toolchain.bzl", - "JavaTestToolchainInfo", # @unused Used as a type "JavaToolchainInfo", # @unused Used as a type ) +load("@prelude//utils:argfile.bzl", "at_argfile") -def _class_to_src_map_args(mapping: [Artifact, None]): +def _class_to_src_map_args(mapping: Artifact | None): if mapping != None: return cmd_args(mapping) return cmd_args() @@ -34,8 +34,8 @@ JavaClassToSourceMapInfo = provider( def create_class_to_source_map_info( ctx: AnalysisContext, - mapping: [Artifact, None] = None, - mapping_debuginfo: [Artifact, None] = None, + mapping: Artifact | None = None, + mapping_debuginfo: Artifact | None = None, deps = [Dependency]) -> JavaClassToSourceMapInfo: # Only generate debuginfo if the debug info tool is available. java_toolchain = ctx.attrs._java_toolchain[JavaToolchainInfo] @@ -73,22 +73,28 @@ def create_class_to_source_map_from_jar( name: str, java_toolchain: JavaToolchainInfo, jar: Artifact, - srcs: list[Artifact]) -> Artifact: + srcs: list[Artifact], + sources_jar_name: [str, None] = None) -> (Artifact, Artifact | None): output = actions.declare_output(name) cmd = cmd_args(java_toolchain.gen_class_to_source_map[RunInfo]) + if java_toolchain.gen_class_to_source_map_include_sourceless_compiled_packages != None: + for item in java_toolchain.gen_class_to_source_map_include_sourceless_compiled_packages: + cmd.add("-i", item) cmd.add("-o", output.as_output()) cmd.add(jar) - inputs_file = actions.write("class_to_srcs_map_argsfile.txt", srcs) - cmd.add(cmd_args(inputs_file, format = "@{}")) - cmd.hidden(srcs) + cmd.add(at_argfile(actions = actions, name = "class_to_srcs_map_argsfile.txt", args = srcs)) + sources_jar = None + if sources_jar_name: + sources_jar = actions.declare_output(sources_jar_name) + cmd.add("--sources_jar", sources_jar.as_output()) actions.run(cmd, category = "class_to_srcs_map") - return output + return (output, sources_jar) def maybe_create_class_to_source_map_debuginfo( actions: AnalysisActions, name: str, java_toolchain: JavaToolchainInfo, - srcs: list[Artifact]) -> [Artifact, None]: + srcs: list[Artifact]) -> Artifact | None: # Only generate debuginfo if the debug info tool is available. if java_toolchain.gen_class_to_source_map_debuginfo == None: return None @@ -97,34 +103,33 @@ def maybe_create_class_to_source_map_debuginfo( cmd = cmd_args(java_toolchain.gen_class_to_source_map_debuginfo[RunInfo]) cmd.add("gen") cmd.add("-o", output.as_output()) - inputs_file = actions.write("sourcefiles.txt", srcs) - cmd.add(cmd_args(inputs_file, format = "@{}")) - cmd.hidden(srcs) + cmd.add(at_argfile(actions = actions, name = "sourcefiles.txt", args = srcs)) actions.run(cmd, category = "class_to_srcs_map_debuginfo") return output def merge_class_to_source_map_from_jar( actions: AnalysisActions, name: str, - java_test_toolchain: JavaTestToolchainInfo, - mapping: [Artifact, None] = None, - relative_to: [CellRoot, None] = None, - # TODO(nga): I think this meant to be type, not default value. 
- deps = [JavaClassToSourceMapInfo.type]) -> Artifact: + java_toolchain: JavaToolchainInfo, + relative_to: [CellRoot, None], + deps: list[JavaClassToSourceMapInfo]) -> Artifact: output = actions.declare_output(name) - cmd = cmd_args(java_test_toolchain.merge_class_to_source_maps[RunInfo]) - cmd.add(cmd_args(output.as_output(), format = "--output={}")) - if relative_to != None: - cmd.add(cmd_args(str(relative_to), format = "--relative-to={}")) + tset = actions.tset( JavaClassToSourceMapTset, - value = mapping, + value = None, children = [d.tset for d in deps], ) class_to_source_files = tset.project_as_args("class_to_src_map") mappings_file = actions.write("class_to_src_map.txt", class_to_source_files) - cmd.add(["--mappings", mappings_file]) - cmd.hidden(class_to_source_files) + + cmd = cmd_args( + java_toolchain.merge_class_to_source_maps[RunInfo], + cmd_args(output.as_output(), format = "--output={}"), + cmd_args(str(relative_to), format = "--relative-to={}") if relative_to != None else [], + ["--mappings", mappings_file], + hidden = class_to_source_files, + ) actions.run(cmd, category = "merge_class_to_srcs_map") return output @@ -143,8 +148,7 @@ def _create_merged_debug_info( children = [tset_debuginfo], ) input_files = tset.project_as_args("class_to_src_map") - input_list_file = actions.write("debuginfo_list.txt", input_files) - cmd.add(cmd_args(input_list_file, format = "@{}")) - cmd.hidden(input_files) + cmd.add(at_argfile(actions = actions, name = "debuginfo_list.txt", args = input_files)) + actions.run(cmd, category = "merged_debuginfo") return output diff --git a/prelude/java/dex.bzl b/prelude/java/dex.bzl index d2fd2f6ddd2..16a0c768031 100644 --- a/prelude/java/dex.bzl +++ b/prelude/java/dex.bzl @@ -51,7 +51,7 @@ def get_dex_produced_from_java_library( else: desugar_deps_file = ctx.actions.write(prefix + "_desugar_deps_file.txt", desugar_deps) d8_cmd.add(["--classpath-files", desugar_deps_file]) - d8_cmd.hidden(desugar_deps) + d8_cmd.add(cmd_args(hidden = desugar_deps)) referenced_resources_file = ctx.actions.declare_output(prefix + "_referenced_resources.txt") d8_cmd.add(["--referenced-resources-path", referenced_resources_file.as_output()]) @@ -71,7 +71,7 @@ def get_dex_produced_from_java_library( identifier = "{}:{} {}".format(ctx.label.package, ctx.label.name, output_dex_file.short_path) ctx.actions.run( d8_cmd, - category = "d8", + category = "pre_dex", identifier = identifier, ) diff --git a/prelude/java/gwt_binary.bzl b/prelude/java/gwt_binary.bzl index 2c13c70a816..d5d86cc592b 100644 --- a/prelude/java/gwt_binary.bzl +++ b/prelude/java/gwt_binary.bzl @@ -14,7 +14,7 @@ load( "get_all_java_packaging_deps", ) -GWT_COMPILER_CLASS = "com.google.gwt.dev.Compiler" +_GWT_COMPILER_CLASS = "com.google.gwt.dev.Compiler" def gwt_binary_impl(ctx: AnalysisContext) -> list[Provider]: expect(ctx.attrs.local_workers > 0, "local workers must be greater than zero") @@ -35,7 +35,7 @@ def gwt_binary_impl(ctx: AnalysisContext) -> list[Provider]: ctx.attrs.vm_args, "-classpath", cmd_args(module_deps_classpath + deps_classpath, delimiter = get_path_separator_for_exec_os(ctx)), - GWT_COMPILER_CLASS, + _GWT_COMPILER_CLASS, "-war", output.as_output(), "-style", @@ -57,6 +57,8 @@ def gwt_binary_impl(ctx: AnalysisContext) -> list[Provider]: ctx.actions.run(gwt_args, category = "gwt_binary") + sub_targets = {"deploy": [DefaultInfo(default_output = deploy_output)]} + return [ - DefaultInfo(default_output = output), + DefaultInfo(default_output = output, sub_targets = sub_targets), ] diff --git 
a/prelude/java/java.bzl b/prelude/java/java.bzl index f3be03b5a9e..8ab08fcb5f6 100644 --- a/prelude/java/java.bzl +++ b/prelude/java/java.bzl @@ -5,6 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +load("@prelude//:validation_deps.bzl", "VALIDATION_DEPS_ATTR_NAME") load("@prelude//android:build_only_native_code.bzl", "is_build_only_native_code") load("@prelude//android:configuration.bzl", "is_building_android_binary_attr") load("@prelude//android:min_sdk_version.bzl", "get_min_sdk_version_constraint_value_name", "get_min_sdk_version_range") @@ -64,8 +65,8 @@ extra_attributes = { }, "java_library": { "abi_generation_mode": attrs.option(attrs.enum(AbiGenerationMode), default = None), - "javac": attrs.option(attrs.one_of(attrs.exec_dep(), attrs.source()), default = None), "resources_root": attrs.option(attrs.string(), default = None), + VALIDATION_DEPS_ATTR_NAME: attrs.set(attrs.dep(), sorted = True, default = []), "_build_only_native_code": attrs.default_only(attrs.bool(default = is_build_only_native_code())), "_dex_min_sdk_version": attrs.option(attrs.int(), default = dex_min_sdk_version()), "_dex_toolchain": toolchains_common.dex(), @@ -78,7 +79,7 @@ extra_attributes = { }, "java_test": { "abi_generation_mode": attrs.option(attrs.enum(AbiGenerationMode), default = None), - "javac": attrs.option(attrs.one_of(attrs.exec_dep(), attrs.source()), default = None), + "java_agents": attrs.list(attrs.source(), default = []), "resources_root": attrs.option(attrs.string(), default = None), "test_class_names_file": attrs.option(attrs.source(), default = None), "unbundled_resources_root": attrs.option(attrs.source(allow_directory = True), default = None), @@ -94,6 +95,7 @@ extra_attributes = { }, "prebuilt_jar": { "generate_abi": attrs.bool(default = True), + "is_executable": attrs.bool(default = False), # Prebuilt jars are quick to build, and often contain third-party code, which in turn is # often a source of annotations and constants. To ease migration to ABI generation from # source without deps, we have them present during ABI gen by default. 
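
A note on the validation-deps wiring above: the new `VALIDATION_DEPS_ATTR_NAME` attribute on `java_library` only declares the dependencies; enforcement happens in the `java_library.bzl` hunk further below, which attaches the validation deps' outputs as hidden inputs on the compile action. A minimal sketch of the mechanism, with simplified assumed signatures (the real helpers live in `prelude//:validation_deps.bzl`):

    # Sketch only: gather the default outputs of every validation dep so an
    # action can depend on them without passing them on the command line.
    def get_validation_deps_outputs(ctx):
        outputs = []
        for dep in getattr(ctx.attrs, "validation_deps", []):
            outputs.extend(dep[DefaultInfo].default_outputs)
        return outputs

    # Marking the outputs as `hidden` makes the compile action depend on the
    # validation targets, so a failing validation fails the build.
    extra_arguments = cmd_args(ctx.attrs.extra_arguments, hidden = get_validation_deps_outputs(ctx))
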
diff --git a/prelude/java/java_binary.bzl b/prelude/java/java_binary.bzl index d80498ef96c..a03b9156e77 100644 --- a/prelude/java/java_binary.bzl +++ b/prelude/java/java_binary.bzl @@ -22,32 +22,62 @@ load( "get_java_packaging_info", ) -def _generate_script(generate_wrapper: bool, native_libs: dict[str, SharedLibrary]) -> bool: +def _should_use_incremental_build(ctx: AnalysisContext): + # use incremental build only for __unstamped jars (which includes inner.jar) + return ctx.label.name.startswith("__unstamped") and ( + "incremental_build" in ctx.attrs.labels or read_config("java", "inc_build", "false").lower() == "true" + ) + +def _is_nested_package(ctx: AnalysisContext, pkg: str) -> bool: + return pkg == ctx.label.package or pkg.startswith(ctx.label.package + "/") + +def _get_dependencies_jars(ctx: AnalysisContext, package_deps: typing.Any) -> cmd_args: + jars = cmd_args() + for dep in package_deps.transitive_set.traverse(): + if dep.jar and not _is_nested_package(ctx, dep.label.package): + jars.add(dep.jar) + return jars + +def _get_incremental_jars(ctx: AnalysisContext, package_deps: typing.Any) -> cmd_args: + jars = cmd_args() + for dep in package_deps.transitive_set.traverse(): + if dep.jar and _is_nested_package(ctx, dep.label.package): + jars.add(dep.jar) + return jars + +def _generate_script(generate_wrapper: bool, native_libs: list[SharedLibrary]) -> bool: # if `generate_wrapper` is set and no native libs then it should be a wrapper script as result, # otherwise fat jar will be generated (inner jar or script will be included inside a final fat jar) return generate_wrapper and len(native_libs) == 0 def _create_fat_jar( ctx: AnalysisContext, - java_toolchain: JavaToolchainInfo, jars: cmd_args, - native_libs: dict[str, SharedLibrary], - do_not_create_inner_jar: bool, - generate_wrapper: bool) -> list[Artifact]: + native_libs: list[SharedLibrary], + name_prefix: str = "", + do_not_create_inner_jar: bool = True, + generate_wrapper: bool = False, + main_class: [str, None] = None, + append_jar: [Artifact, None] = None) -> list[Artifact]: + java_toolchain = ctx.attrs._java_toolchain[JavaToolchainInfo] extension = "sh" if _generate_script(generate_wrapper, native_libs) else "jar" - output = ctx.actions.declare_output("{}.{}".format(ctx.label.name, extension)) + output = ctx.actions.declare_output("{}{}.{}".format(name_prefix, ctx.label.name, extension)) args = [ java_toolchain.fat_jar[RunInfo], "--jar_builder_tool", cmd_args(java_toolchain.jar_builder, delimiter = " "), + "--zip_scrubber_tool", + cmd_args(java_toolchain.zip_scrubber, delimiter = " "), "--output", output.as_output(), "--jars_file", - ctx.actions.write("jars_file", jars), + ctx.actions.write("{}jars_file".format(name_prefix), jars), ] - local_only = False + if append_jar: + args += ["--append_jar", append_jar] + if native_libs: expect( java_toolchain.is_bootstrap_toolchain == False, @@ -55,7 +85,7 @@ def _create_fat_jar( ) args += [ "--native_libs_file", - ctx.actions.write("native_libs", [cmd_args([so_name, native_lib.lib.output], delimiter = " ") for so_name, native_lib in native_libs.items()]), + ctx.actions.write("{}native_libs".format(name_prefix), [cmd_args([native_lib.soname.ensure_str(), native_lib.lib.output], delimiter = " ") for native_lib in native_libs]), ] if do_not_create_inner_jar: args += [ @@ -73,11 +103,6 @@ def _create_fat_jar( "nativelibs", ] - # TODO(T151045001) native deps are not compressed (for performance), but that can result in - # really large binaries. 
Large outputs can cause issues on RE, so we run locally instead. - local_only = "run_locally_if_has_native_deps" in ctx.attrs.labels - - main_class = ctx.attrs.main_class if main_class: args += ["--main_class", main_class] @@ -87,7 +112,7 @@ def _create_fat_jar( blocklist = ctx.attrs.blacklist if blocklist: - args += ["--blocklist", ctx.actions.write("blocklist_args", blocklist)] + args += ["--blocklist", ctx.actions.write("{}blocklist_args".format(name_prefix), blocklist)] if ctx.attrs.meta_inf_directory: args += ["--meta_inf_directory", ctx.attrs.meta_inf_directory] @@ -106,13 +131,15 @@ def _create_fat_jar( ] outputs.append(classpath_args_output) - fat_jar_cmd = cmd_args(args) - fat_jar_cmd.hidden(jars, [native_lib.lib.output for native_lib in native_libs.values()]) + fat_jar_cmd = cmd_args( + args, + hidden = [jars] + [native_lib.lib.output for native_lib in native_libs], + ) ctx.actions.run( fat_jar_cmd, - local_only = local_only, - category = "fat_jar", + local_only = False, + category = "{}fat_jar".format(name_prefix), allow_cache_upload = True, ) @@ -171,37 +198,72 @@ def java_binary_impl(ctx: AnalysisContext) -> list[Provider]: need_to_generate_wrapper = ctx.attrs.generate_wrapper == True do_not_create_inner_jar = ctx.attrs.do_not_create_inner_jar == True packaging_jar_args = packaging_info.packaging_deps.project_as_args("full_jar_args") - outputs = _create_fat_jar(ctx, java_toolchain, cmd_args(packaging_jar_args), native_deps, do_not_create_inner_jar, need_to_generate_wrapper) + main_class = ctx.attrs.main_class - main_artifact = outputs[0] other_outputs = [] + if _should_use_incremental_build(ctx): + # collect all dependencies + dependencies_jars = _get_dependencies_jars(ctx, packaging_jar_args) + + # collect nested targets + incremental_jars = _get_incremental_jars(ctx, packaging_jar_args) + + # generate intermediary jar only with dependencies + deps_outputs = _create_fat_jar( + ctx, + dependencies_jars, + native_deps, + name_prefix = "deps_", + ) + other_outputs = [deps_outputs[0]] + + # generate final jar appending modules to the dependencies jar + outputs = _create_fat_jar( + ctx, + incremental_jars, + native_deps, + do_not_create_inner_jar = do_not_create_inner_jar, + generate_wrapper = need_to_generate_wrapper, + main_class = main_class, + append_jar = deps_outputs[0], + ) + else: + outputs = _create_fat_jar( + ctx, + cmd_args(packaging_jar_args), + native_deps, + do_not_create_inner_jar = do_not_create_inner_jar, + generate_wrapper = need_to_generate_wrapper, + main_class = main_class, + ) + run_cmd = _get_run_cmd( attrs = ctx.attrs, script_mode = _generate_script(need_to_generate_wrapper, native_deps), - main_artifact = main_artifact, + main_artifact = outputs[0], java_toolchain = java_toolchain, ) if need_to_generate_wrapper: classpath_file = outputs[1] - run_cmd.hidden( + run_cmd.add(cmd_args(hidden = [ java_toolchain.java[RunInfo], classpath_file, packaging_jar_args, - ) + ])) other_outputs = [classpath_file] + [packaging_jar_args] + _get_java_tool_artifacts(java_toolchain) sub_targets = get_classpath_subtarget(ctx.actions, packaging_info) - class_to_src_map, _ = get_class_to_source_map_info( + class_to_src_map, _, _ = get_class_to_source_map_info( ctx, outputs = None, deps = ctx.attrs.deps, ) return [ - DefaultInfo(default_output = main_artifact, other_outputs = other_outputs, sub_targets = sub_targets), + DefaultInfo(default_output = outputs[0], other_outputs = other_outputs, sub_targets = sub_targets), RunInfo(args = run_cmd), create_template_info(ctx, 
packaging_info, first_order_libs), class_to_src_map, diff --git a/prelude/java/java_library.bzl b/prelude/java/java_library.bzl index 5c3571633d6..fb1ab544937 100644 --- a/prelude/java/java_library.bzl +++ b/prelude/java/java_library.bzl @@ -6,6 +6,7 @@ # of this source tree. load("@prelude//:paths.bzl", "paths") +load("@prelude//:validation_deps.bzl", "get_validation_deps_outputs") load("@prelude//android:android_providers.bzl", "merge_android_packageable_info") load( "@prelude//java:java_providers.bzl", @@ -17,6 +18,7 @@ load( "create_java_library_providers", "create_native_providers", "derive_compiling_deps", + "generate_java_classpath_snapshot", "make_compile_outputs", "to_list", ) @@ -42,9 +44,8 @@ _SUPPORTED_ARCHIVE_SUFFIXES = [".src.zip", "-sources.jar"] def _process_classpath( actions: AnalysisActions, classpath_args: cmd_args, - cmd: cmd_args, args_file_name: str, - option_name: str): + option_name: str) -> cmd_args: # write joined classpath string into args file classpath_args_file, _ = actions.write( args_file_name, @@ -52,13 +53,15 @@ def _process_classpath( allow_args = True, ) - # mark classpath artifacts as input - cmd.hidden(classpath_args) - - # add classpath args file to cmd - cmd.add(option_name, classpath_args_file) + return cmd_args( + option_name, + # add classpath args file to cmd + classpath_args_file, + # mark classpath artifacts as input + hidden = classpath_args, + ) -def classpath_args(ctx: AnalysisContext, args): +def _classpath_args(ctx: AnalysisContext, args): return cmd_args(args, delimiter = get_path_separator_for_exec_os(ctx)) def _process_plugins( @@ -66,8 +69,8 @@ def _process_plugins( actions_identifier: [str, None], annotation_processor_properties: AnnotationProcessorProperties, plugin_params: [PluginParams, None], - javac_args: cmd_args, - cmd: cmd_args): + javac_args: cmd_args) -> cmd_args: + cmd = cmd_args() processors_classpath_tsets = [] # Process Annotation processors @@ -109,14 +112,15 @@ def _process_plugins( processors_classpath_tset = None if processors_classpath_tset: - processors_classpath = classpath_args(ctx, processors_classpath_tset.project_as_args("full_jar_args")) - _process_classpath( + processors_classpath = _classpath_args(ctx, processors_classpath_tset.project_as_args("full_jar_args")) + cmd.add(_process_classpath( ctx.actions, processors_classpath, - cmd, declare_prefixed_name("plugin_cp_args", actions_identifier), "--javac_processors_classpath_file", - ) + )) + + return cmd def _build_classpath(actions: AnalysisActions, deps: list[Dependency], additional_classpath_entries: list[Artifact], classpath_args_projection: str) -> [cmd_args, None]: compiling_deps_tset = derive_compiling_deps(actions, None, deps) @@ -132,12 +136,11 @@ def _build_classpath(actions: AnalysisActions, deps: list[Dependency], additiona def _build_bootclasspath(bootclasspath_entries: list[Artifact], source_level: int, java_toolchain: JavaToolchainInfo) -> list[Artifact]: bootclasspath_list = [] - if source_level in [7, 8]: + if source_level in [8]: if bootclasspath_entries: bootclasspath_list = bootclasspath_entries - elif source_level == 7: - bootclasspath_list = java_toolchain.bootclasspath_7 elif source_level == 8: + expect(java_toolchain.bootclasspath_8, "Must specify bootclasspath for source level 8") bootclasspath_list = java_toolchain.bootclasspath_8 return bootclasspath_list @@ -155,8 +158,8 @@ def _append_javac_params( extra_arguments: cmd_args, additional_classpath_entries: list[Artifact], bootclasspath_entries: list[Artifact], - cmd: cmd_args, - 
generated_sources_dir: Artifact): + generated_sources_dir: Artifact) -> cmd_args: + cmd = cmd_args() javac_args = cmd_args( "-encoding", "utf-8", @@ -168,13 +171,12 @@ def _append_javac_params( compiling_classpath = _build_classpath(ctx.actions, deps, additional_classpath_entries, "args_for_compiling") if compiling_classpath: - _process_classpath( + cmd.add(_process_classpath( ctx.actions, - classpath_args(ctx, compiling_classpath), - cmd, + _classpath_args(ctx, compiling_classpath), declare_prefixed_name("classpath_args", actions_identifier), "--javac_classpath_file", - ) + )) else: javac_args.add("-classpath ''") @@ -185,22 +187,20 @@ def _append_javac_params( bootclasspath_list = _build_bootclasspath(bootclasspath_entries, source_level, java_toolchain) if bootclasspath_list: - _process_classpath( + cmd.add(_process_classpath( ctx.actions, - classpath_args(ctx, bootclasspath_list), - cmd, + _classpath_args(ctx, bootclasspath_list), declare_prefixed_name("bootclasspath_args", actions_identifier), "--javac_bootclasspath_file", - ) + )) - _process_plugins( + cmd.add(_process_plugins( ctx, actions_identifier, annotation_processor_properties, javac_plugin_params, javac_args, - cmd, - ) + )) cmd.add("--generated_sources_dir", generated_sources_dir.as_output()) @@ -212,20 +212,22 @@ def _append_javac_params( javac_args, allow_args = True, ) - cmd.hidden(javac_args) + cmd.add(cmd_args(hidden = javac_args)) # mark plain srcs artifacts as input - cmd.hidden(plain_sources) + cmd.add(cmd_args(hidden = plain_sources)) cmd.add("--javac_args_file", args_file) if zipped_sources: cmd.add("--zipped_sources_file", ctx.actions.write(declare_prefixed_name("zipped_source_args", actions_identifier), zipped_sources)) - cmd.hidden(zipped_sources) + cmd.add(cmd_args(hidden = zipped_sources)) if remove_classes: cmd.add("--remove_classes", ctx.actions.write(declare_prefixed_name("remove_classes_args", actions_identifier), remove_classes)) + return cmd + def split_on_archives_and_plain_files( srcs: list[Artifact], plain_file_extensions: list[str]) -> (list[Artifact], list[Artifact]): @@ -275,13 +277,13 @@ def compile_to_jar( srcs: list[Artifact], *, abi_generation_mode: [AbiGenerationMode, None] = None, - output: [Artifact, None] = None, + output: Artifact | None = None, actions_identifier: [str, None] = None, javac_tool: [typing.Any, None] = None, resources: [list[Artifact], None] = None, resources_root: [str, None] = None, remove_classes: [list[str], None] = None, - manifest_file: [Artifact, None] = None, + manifest_file: Artifact | None = None, annotation_processor_properties: [AnnotationProcessorProperties, None] = None, plugin_params: [PluginParams, None] = None, source_level: [int, None] = None, @@ -291,9 +293,10 @@ def compile_to_jar( source_only_abi_deps: [list[Dependency], None] = None, extra_arguments: [cmd_args, None] = None, additional_classpath_entries: [list[Artifact], None] = None, - additional_compiled_srcs: [Artifact, None] = None, + additional_compiled_srcs: Artifact | None = None, bootclasspath_entries: [list[Artifact], None] = None, - is_creating_subtarget: bool = False) -> JavaCompileOutputs: + is_creating_subtarget: bool = False, + debug_port: [int, None] = None) -> JavaCompileOutputs: if not additional_classpath_entries: additional_classpath_entries = [] if not bootclasspath_entries: @@ -347,6 +350,7 @@ def compile_to_jar( bootclasspath_entries, is_building_android_binary, is_creating_subtarget, + debug_port, ) def _create_jar_artifact( @@ -355,13 +359,13 @@ def _create_jar_artifact( 
abi_generation_mode: [AbiGenerationMode, None], java_toolchain: JavaToolchainInfo, label: Label, - output: [Artifact, None], + output: Artifact | None, javac_tool: [typing.Any, None], srcs: list[Artifact], remove_classes: list[str], resources: list[Artifact], resources_root: [str, None], - manifest_file: [Artifact, None], + manifest_file: Artifact | None, annotation_processor_properties: AnnotationProcessorProperties, plugin_params: [PluginParams, None], source_level: int, @@ -371,10 +375,11 @@ def _create_jar_artifact( _source_only_abi_deps: list[Dependency], extra_arguments: cmd_args, additional_classpath_entries: list[Artifact], - additional_compiled_srcs: [Artifact, None], + additional_compiled_srcs: Artifact | None, bootclasspath_entries: list[Artifact], _is_building_android_binary: bool, - _is_creating_subtarget: bool = False) -> JavaCompileOutputs: + _is_creating_subtarget: bool = False, + _debug_port: [int, None] = None) -> JavaCompileOutputs: """ Creates jar artifact. @@ -412,7 +417,7 @@ def _create_jar_artifact( generated_sources_dir = None if not skip_javac: generated_sources_dir = ctx.actions.declare_output(declare_prefixed_name("generated_sources", actions_identifier), dir = True) - _append_javac_params( + compile_and_package_cmd.add(_append_javac_params( ctx, actions_identifier, java_toolchain, @@ -426,9 +431,8 @@ def _create_jar_artifact( extra_arguments, additional_classpath_entries, bootclasspath_entries, - compile_and_package_cmd, generated_sources_dir, - ) + )) ctx.actions.run(compile_and_package_cmd, category = "javac_and_jar", identifier = actions_identifier) @@ -437,11 +441,14 @@ def _create_jar_artifact( has_postprocessor = hasattr(ctx.attrs, "jar_postprocessor") and ctx.attrs.jar_postprocessor final_jar = postprocess_jar(ctx.actions, ctx.attrs.jar_postprocessor[RunInfo], jar_out, actions_identifier) if has_postprocessor else jar_out + jar_snapshot = generate_java_classpath_snapshot(ctx.actions, java_toolchain.cp_snapshot_generator, abi or final_jar, actions_identifier) return make_compile_outputs( full_library = final_jar, + preprocessed_library = jar_out, class_abi = abi, required_for_source_only_abi = required_for_source_only_abi, annotation_processor_output = generated_sources_dir, + abi_jar_snapshot = jar_snapshot, ) def _check_dep_types(deps: list[Dependency]): @@ -463,6 +470,10 @@ def _check_exported_deps(exported_deps: list[Dependency], attr_name: str): "Exported deps are meant to be forwarded onto the classpath for dependents, so only " + "make sense for a target that emits Java bytecode, {} in {} does not.".format(exported_dep, attr_name), ) + expect( + not exported_dep[JavaLibraryInfo].may_not_be_exported, + "{} has 'may_not_be_exported' label and should not be present in {}.".format(exported_dep.label.raw_target(), attr_name), + ) # TODO(T145137403) remove need for this def _skip_java_library_dep_checks(ctx: AnalysisContext) -> bool: @@ -502,7 +513,11 @@ def java_library_impl(ctx: AnalysisContext) -> list[Provider]: _check_dep_types(ctx.attrs.exported_provided_deps) _check_dep_types(ctx.attrs.runtime_deps) - java_providers = build_java_library(ctx, ctx.attrs.srcs) + java_providers = build_java_library( + ctx = ctx, + srcs = ctx.attrs.srcs, + validation_deps_outputs = get_validation_deps_outputs(ctx), + ) return to_list(java_providers) + [android_packageable_info] @@ -512,10 +527,11 @@ def build_java_library( run_annotation_processors = True, additional_classpath_entries: list[Artifact] = [], bootclasspath_entries: list[Artifact] = [], - 
additional_compiled_srcs: [Artifact, None] = None, + additional_compiled_srcs: Artifact | None = None, generated_sources: list[Artifact] = [], override_abi_generation_mode: [AbiGenerationMode, None] = None, - extra_sub_targets: dict = {}) -> JavaProviders: + extra_sub_targets: dict = {}, + validation_deps_outputs: [list[Artifact], None] = None) -> JavaProviders: expect( not getattr(ctx.attrs, "_build_only_native_code", False), "Shouldn't call build_java_library if we're only building native code!", @@ -564,6 +580,7 @@ def build_java_library( "additional_compiled_srcs": additional_compiled_srcs, "annotation_processor_properties": annotation_processor_properties, "bootclasspath_entries": bootclasspath_entries, + "debug_port": getattr(ctx.attrs, "debug_port", None), "deps": first_order_deps, "javac_tool": derive_javac(ctx.attrs.javac) if ctx.attrs.javac else None, "manifest_file": manifest_file, @@ -577,10 +594,17 @@ def build_java_library( "target_level": target_level, } + # The outputs of validation_deps need to be added as hidden arguments + # to an action for the validation_deps targets to be built and enforced. + extra_arguments = cmd_args( + ctx.attrs.extra_arguments, + hidden = validation_deps_outputs or [], + ) + outputs = compile_to_jar( ctx, plugin_params = plugin_params, - extra_arguments = cmd_args(ctx.attrs.extra_arguments), + extra_arguments = extra_arguments, **common_compile_kwargs ) @@ -626,7 +650,8 @@ def build_java_library( ctx.actions.write("gwt_entries.txt", entries), "--output", gwt_output.as_output(), - ).hidden(entries) + hidden = entries, + ) ctx.actions.run(gwt_cmd_args, category = "gwt_module") @@ -639,9 +664,18 @@ def build_java_library( DefaultInfo(default_output = all_generated_sources[0]), ]} - java_library_info, java_packaging_info, shared_library_info, cxx_resource_info, linkable_graph, template_placeholder_info, intellij_info = create_java_library_providers( + class_to_src_map, sources_jar, class_to_src_map_sub_targets = get_class_to_source_map_info( + ctx, + outputs = outputs, + deps = ctx.attrs.deps + deps_query + ctx.attrs.exported_deps, + generate_sources_jar = True, + ) + extra_sub_targets = extra_sub_targets | class_to_src_map_sub_targets + + java_library_info, java_packaging_info, global_code_info, shared_library_info, cxx_resource_info, linkable_graph, template_placeholder_info, intellij_info = create_java_library_providers( ctx, library_output = outputs.classpath_entry if outputs else None, + global_code_config = java_toolchain.global_code_config, declared_deps = ctx.attrs.deps + deps_query, exported_deps = ctx.attrs.exported_deps, provided_deps = ctx.attrs.provided_deps + provided_deps_query, @@ -650,16 +684,11 @@ def build_java_library( needs_desugar = source_level > 7 or target_level > 7, generated_sources = all_generated_sources, has_srcs = has_srcs, + sources_jar = sources_jar, gwt_module = gwt_output, + preprocessed_library = outputs.preprocessed_library if outputs else None, ) - class_to_src_map, class_to_src_map_sub_targets = get_class_to_source_map_info( - ctx, - outputs = outputs, - deps = ctx.attrs.deps + deps_query + ctx.attrs.exported_deps, - ) - extra_sub_targets = extra_sub_targets | class_to_src_map_sub_targets - default_info = get_default_info( ctx.actions, java_toolchain, @@ -671,6 +700,7 @@ def build_java_library( java_library_info = java_library_info, java_library_intellij_info = intellij_info, java_packaging_info = java_packaging_info, + java_global_code_info = global_code_info, shared_library_info = shared_library_info, 
cxx_resource_info = cxx_resource_info, linkable_graph = linkable_graph, diff --git a/prelude/java/java_providers.bzl b/prelude/java/java_providers.bzl index 2d2c6e6ac96..f67e03fb284 100644 --- a/prelude/java/java_providers.bzl +++ b/prelude/java/java_providers.bzl @@ -21,6 +21,10 @@ load( "merge_shared_libraries", ) load("@prelude//utils:expect.bzl", "expect") +load( + "@prelude//utils:utils.bzl", + "flatten", +) # JAVA PROVIDER DOCS # @@ -90,8 +94,9 @@ JavaClasspathEntry = record( abi = field(Artifact), # abi_as_dir is the abi .jar unzipped into a directory. If available, it is used to provide # .class level granularity for javacd and kotlincd dep-files. - abi_as_dir = field([Artifact, None]), + abi_as_dir = field(Artifact | None), required_for_source_only_abi = field(bool), + abi_jar_snapshot = field(Artifact | None), ) def _args_for_ast_dumper(entry: JavaClasspathEntry): @@ -125,15 +130,16 @@ JavaCompilingDepsTSet = transitive_set( JavaPackagingDep = record( label = Label, - jar = [Artifact, None], + jar = Artifact | None, dex = [DexLibraryInfo, None], - gwt_module = [Artifact, None], + gwt_module = Artifact | None, is_prebuilt_jar = bool, - proguard_config = [Artifact, None], + proguard_config = Artifact | None, # An output that is used solely by the system to have an artifact bound to the target (that the core can then use to find # the right target from the given artifact). output_for_classpath_macro = Artifact, + sources_jar = Artifact | None, ) def _full_jar_args(dep: JavaPackagingDep): @@ -157,6 +163,17 @@ JavaPackagingDepTSet = transitive_set( }, ) +JavaGlobalCodeInfo = provider( + doc = """This dictionary maps a framework key to its corresponding GlobalCodeConfig. The GlobalCodeConfig specifies the dependency .jars required by the framework for global-level code generation (binary level). + The process responsible for generating the global_code_info provider for the target utilizes this mapping to: + * Retrieve the GlobalCodeConfig associated with each framework, identified by its key. + * Determine whether the .jar files for the library or any of its dependencies are necessary for global code generation for that particular framework. + * Create a mapping from each framework key to a list of the required .jars identified in the previous step.""", + fields = { + "global_code_map": provider_field(typing.Any, default = None), # "{name: JavaCompilingDepsTSet}" + }, +) + JavaLibraryInfo = provider( doc = "Information about a java library and its dependencies", fields = { @@ -168,6 +185,9 @@ JavaLibraryInfo = provider( # An output of the library. If present then already included into `compiling_deps` field. "library_output": provider_field(typing.Any, default = None), # ["JavaClasspathEntry", None] + # Shows if the library can be exported or not + "may_not_be_exported": provider_field(typing.Any, default = None), + # An output that is used solely by the system to have an artifact bound to the target (that the core can then use to find # the right target from the given artifact). 
"output_for_classpath_macro": provider_field(typing.Any, default = None), # "artifact" @@ -175,7 +195,6 @@ JavaLibraryInfo = provider( ) JavaLibraryIntellijInfo = provider( - # @unsorted-dict-items doc = "Information about a java library that is required for Intellij project generation", fields = { # Directory containing external annotation jars @@ -183,6 +202,10 @@ JavaLibraryIntellijInfo = provider( # All the artifacts that were used in order to compile this library "compiling_classpath": provider_field(typing.Any, default = None), # ["artifact"] "generated_sources": provider_field(typing.Any, default = None), # ["artifact"] + "lint_jar": provider_field(typing.Any, default = None), # ["artifact"] + # If this library has a jar_postprocessor, this is the jar prior to post-processing. + # Otherwise, it is the same as library_output in JavaLibraryInfo. + "preprocessed_library": provider_field(typing.Any, default = None), # ["artifact", None] }, ) @@ -204,17 +227,20 @@ KeystoreInfo = provider( JavaCompileOutputs = record( full_library = Artifact, - class_abi = [Artifact, None], - source_abi = [Artifact, None], - source_only_abi = [Artifact, None], + class_abi = Artifact | None, + source_abi = Artifact | None, + source_only_abi = Artifact | None, classpath_entry = JavaClasspathEntry, - annotation_processor_output = [Artifact, None], + annotation_processor_output = Artifact | None, + preprocessed_library = Artifact, + incremental_state_dir = Artifact | None, ) JavaProviders = record( java_library_info = JavaLibraryInfo, java_library_intellij_info = JavaLibraryIntellijInfo, java_packaging_info = JavaPackagingInfo, + java_global_code_info = JavaGlobalCodeInfo, shared_library_info = SharedLibraryInfo, cxx_resource_info = ResourceInfo, linkable_graph = LinkableGraph, @@ -228,6 +254,7 @@ def to_list(java_providers: JavaProviders) -> list[Provider]: java_providers.java_library_info, java_providers.java_library_intellij_info, java_providers.java_packaging_info, + java_providers.java_global_code_info, java_providers.shared_library_info, java_providers.cxx_resource_info, java_providers.linkable_graph, @@ -242,13 +269,16 @@ def to_list(java_providers: JavaProviders) -> list[Provider]: # specific artifact to be used as the abi for the JavaClasspathEntry. 
def make_compile_outputs( full_library: Artifact, - class_abi: [Artifact, None] = None, - source_abi: [Artifact, None] = None, - source_only_abi: [Artifact, None] = None, - classpath_abi: [Artifact, None] = None, - classpath_abi_dir: [Artifact, None] = None, + preprocessed_library: Artifact, + class_abi: Artifact | None = None, + source_abi: Artifact | None = None, + source_only_abi: Artifact | None = None, + classpath_abi: Artifact | None = None, + classpath_abi_dir: Artifact | None = None, required_for_source_only_abi: bool = False, - annotation_processor_output: [Artifact, None] = None) -> JavaCompileOutputs: + annotation_processor_output: Artifact | None = None, + incremental_state_dir: Artifact | None = None, + abi_jar_snapshot: Artifact | None = None) -> JavaCompileOutputs: expect(classpath_abi != None or classpath_abi_dir == None, "A classpath_abi_dir should only be provided if a classpath_abi is provided!") return JavaCompileOutputs( full_library = full_library, @@ -260,8 +290,11 @@ def make_compile_outputs( abi = classpath_abi or class_abi or full_library, abi_as_dir = classpath_abi_dir, required_for_source_only_abi = required_for_source_only_abi, + abi_jar_snapshot = abi_jar_snapshot, ), annotation_processor_output = annotation_processor_output, + preprocessed_library = preprocessed_library, + incremental_state_dir = incremental_state_dir, ) def create_abi(actions: AnalysisActions, class_abi_generator: Dependency, library: Artifact) -> Artifact: @@ -281,6 +314,26 @@ def create_abi(actions: AnalysisActions, class_abi_generator: Dependency, librar ) return class_abi +def generate_java_classpath_snapshot(actions: AnalysisActions, snapshot_generator: Dependency | None, library: Artifact, action_identifier: str | None) -> Artifact | None: + if not snapshot_generator: + return None + identifier = ( + "{}_".format(action_identifier) if action_identifier else "" + ) + library.short_path.replace("/", "_").split(".")[0] + output = actions.declare_output("{}_jar_snapshot.bin".format(identifier)) + actions.run( + [ + snapshot_generator[RunInfo], + "--input-jar", + library, + "--output-snapshot", + output.as_output(), + ], + category = "jar_snapshot", + identifier = identifier, + ) + return output + # Accumulate deps necessary for compilation, which consist of this library's output and compiling_deps of its exported deps def derive_compiling_deps( actions: AnalysisActions, @@ -303,15 +356,16 @@ def derive_compiling_deps( def create_java_packaging_dep( ctx: AnalysisContext, - library_jar: [Artifact, None] = None, - output_for_classpath_macro: [Artifact, None] = None, + library_jar: Artifact | None = None, + output_for_classpath_macro: Artifact | None = None, needs_desugar: bool = False, desugar_deps: list[Artifact] = [], is_prebuilt_jar: bool = False, has_srcs: bool = True, + sources_jar: Artifact | None = None, dex_weight_factor: int = 1, - proguard_config: [Artifact, None] = None, - gwt_module: [Artifact, None] = None) -> JavaPackagingDep: + proguard_config: Artifact | None = None, + gwt_module: Artifact | None = None) -> JavaPackagingDep: dex_toolchain = getattr(ctx.attrs, "_dex_toolchain", None) if library_jar != None and has_srcs and dex_toolchain != None and ctx.attrs._dex_toolchain[DexToolchainInfo].d8_command != None: dex = get_dex_produced_from_java_library( @@ -335,6 +389,7 @@ def create_java_packaging_dep( is_prebuilt_jar = is_prebuilt_jar, proguard_config = proguard_config or getattr(ctx.attrs, "proguard_config", None), output_for_classpath_macro = output_for_classpath_macro or 
library_jar, + sources_jar = sources_jar, ) def get_all_java_packaging_deps(ctx: AnalysisContext, deps: list[Dependency]) -> list[JavaPackagingDep]: @@ -372,6 +427,86 @@ def get_java_packaging_info( packaging_deps = get_all_java_packaging_deps_tset(ctx, java_packaging_infos, java_packaging_dep) return JavaPackagingInfo(packaging_deps = packaging_deps) +def _create_java_compiling_deps_tset_for_global_code( + actions: AnalysisActions, + global_code_library: [JavaCompilingDepsTSet, None], + name: str, + global_code_infos: list[JavaGlobalCodeInfo]) -> [JavaCompilingDepsTSet, None]: + global_code_jars_kwargs = {} + global_code_jars_children = filter(None, [info.global_code_map.get(name, None) for info in global_code_infos]) + if global_code_library: + global_code_jars_children.append(global_code_library) + if global_code_jars_children: + global_code_jars_kwargs["children"] = global_code_jars_children + + return actions.tset(JavaCompilingDepsTSet, **global_code_jars_kwargs) if global_code_jars_kwargs else None + +# This function identifies and collects the dependencies that meet the criteria defined in `GLOBAL_CODE_CONFIG` for global code generation across frameworks. +# It maps framework names to their corresponding Java compiling dependency sets. +# Example: the configuration below specifies the criteria for the "di" framework: +# GLOBAL_CODE_CONFIG = { +#     "di": ( +#         triggers = ["//fbandroid/java/com/facebook/inject:inject"], +#         deps = [], +#         requires_first_order_classpath = False, +#     ), +# } +# With this setup, if a target depends on "//fbandroid/java/com/facebook/inject:inject", the `global_code_info` provider for that target will have an entry under "di". +# This entry will be a JavaCompilingDepsTSet containing the .jar files associated with that target. +# Each framework (like "di") can use a Buck rule to identify dependencies with matching values for its framework key in the `global_code_info` provider, and then compile all the .jars needed for global code generation.
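+# As a hedged illustration (the provider contents below are assumed from the example config above, not taken from real output), such a target would carry roughly:
+# JavaGlobalCodeInfo(global_code_map = {
+#     "di": <JavaCompilingDepsTSet with this target's .jar>,
+# })
+# A binary-level rule can then merge the "di" entries from the `global_code_info` providers of all its packaging deps and feed the union of .jars to the DI global code generator.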
+ +def get_global_code_info( + ctx: AnalysisContext, + declared_deps: list[Dependency], + packaging_deps: list[Dependency], + single_library_dep: [JavaCompilingDepsTSet, None], + library_compiling_deps: [JavaCompilingDepsTSet, None], + first_order_compiling_deps: [JavaCompilingDepsTSet, None], + global_code_config: dict) -> JavaGlobalCodeInfo: + global_code_infos = filter(None, [x.get(JavaGlobalCodeInfo) for x in packaging_deps]) + + def declared_deps_contains_trigger(deps_triggers: list[TargetLabel]): + for deps_trigger in deps_triggers: + for declared_dep in declared_deps: + if declared_dep.label.raw_target() == deps_trigger: + return True + + return False + + global_code_map = {} + for name, (config) in global_code_config.items(): + contains_trigger = declared_deps_contains_trigger(config.triggers) + target_is_global_code_dep = ctx.label.raw_target() in config.deps + if (contains_trigger or target_is_global_code_dep) and config.requires_first_order_classpath: + global_code_library_compiling_deps = first_order_compiling_deps + elif target_is_global_code_dep: + global_code_library_compiling_deps = library_compiling_deps + elif contains_trigger: + global_code_library_compiling_deps = single_library_dep + else: + global_code_library_compiling_deps = None + + global_code_tset = _create_java_compiling_deps_tset_for_global_code(ctx.actions, global_code_library_compiling_deps, name, global_code_infos) + if global_code_tset: + global_code_map[name] = global_code_tset + + return JavaGlobalCodeInfo(global_code_map = global_code_map) + +def propagate_global_code_info( + ctx: AnalysisContext, + packaging_deps: list[Dependency]) -> JavaGlobalCodeInfo: + global_code_map = {} + global_code_infos = filter(None, [x.get(JavaGlobalCodeInfo) for x in packaging_deps]) + keys = set(flatten([info.global_code_map.keys() for info in global_code_infos])) + + for key in keys: + global_code_tset = _create_java_compiling_deps_tset_for_global_code(ctx.actions, None, key, global_code_infos) + if global_code_tset: + global_code_map[key] = global_code_tset + + return JavaGlobalCodeInfo(global_code_map = global_code_map) + def create_native_providers(ctx: AnalysisContext, label: Label, packaging_deps: list[Dependency]) -> (SharedLibraryInfo, ResourceInfo, LinkableGraph): shared_library_info = merge_shared_libraries( ctx.actions, @@ -388,6 +523,7 @@ def create_native_providers(ctx: AnalysisContext, label: Label, packaging_deps: def _create_non_template_providers( ctx: AnalysisContext, library_output: [JavaClasspathEntry, None], + global_code_config, declared_deps: list[Dependency] = [], exported_deps: list[Dependency] = [], exported_provided_deps: list[Dependency] = [], @@ -396,8 +532,10 @@ def _create_non_template_providers( desugar_classpath: list[Artifact] = [], is_prebuilt_jar: bool = False, has_srcs: bool = True, - proguard_config: [Artifact, None] = None, - gwt_module: [Artifact, None] = None) -> (JavaLibraryInfo, JavaPackagingInfo, SharedLibraryInfo, ResourceInfo, LinkableGraph): + sources_jar: Artifact | None = None, + proguard_config: Artifact | None = None, + gwt_module: Artifact | None = None, + first_order_compiling_deps: JavaCompilingDepsTSet | None = None) -> (JavaLibraryInfo, JavaPackagingInfo, JavaGlobalCodeInfo, SharedLibraryInfo, ResourceInfo, LinkableGraph): """Creates java library providers of type `JavaLibraryInfo` and `JavaPackagingInfo`. 
Args: @@ -419,6 +557,7 @@ def _create_non_template_providers( desugar_classpath, is_prebuilt_jar, has_srcs, + sources_jar, proguard_config = proguard_config, gwt_module = gwt_module, ) @@ -429,13 +568,27 @@ def _create_non_template_providers( java_packaging_dep = java_packaging_dep, ) + compiling_deps = derive_compiling_deps(ctx.actions, library_output, exported_deps + exported_provided_deps) + + global_code_info = get_global_code_info( + ctx, + declared_deps, + packaging_deps, + derive_compiling_deps(ctx.actions, library_output, []), + compiling_deps, + first_order_compiling_deps, + global_code_config, + ) + return ( JavaLibraryInfo( - compiling_deps = derive_compiling_deps(ctx.actions, library_output, exported_deps + exported_provided_deps), + compiling_deps = compiling_deps, library_output = library_output, output_for_classpath_macro = output_for_classpath_macro, + may_not_be_exported = "may_not_be_exported" in (ctx.attrs.labels or []), ), java_packaging_info, + global_code_info, shared_library_info, cxx_resource_info, linkable_graph, @@ -451,6 +604,7 @@ def create_template_info(ctx: AnalysisContext, packaging_info: JavaPackagingInfo def create_java_library_providers( ctx: AnalysisContext, library_output: [JavaClasspathEntry, None], + global_code_config, declared_deps: list[Dependency] = [], exported_deps: list[Dependency] = [], provided_deps: list[Dependency] = [], @@ -459,20 +613,25 @@ def create_java_library_providers( needs_desugar: bool = False, is_prebuilt_jar: bool = False, has_srcs: bool = True, + sources_jar: Artifact | None = None, generated_sources: list[Artifact] = [], - annotation_jars_dir: [Artifact, None] = None, - proguard_config: [Artifact, None] = None, - gwt_module: [Artifact, None] = None) -> (JavaLibraryInfo, JavaPackagingInfo, SharedLibraryInfo, ResourceInfo, LinkableGraph, TemplatePlaceholderInfo, JavaLibraryIntellijInfo): + annotation_jars_dir: Artifact | None = None, + proguard_config: Artifact | None = None, + gwt_module: Artifact | None = None, + lint_jar: Artifact | None = None, + preprocessed_library: Artifact | None = None) -> (JavaLibraryInfo, JavaPackagingInfo, JavaGlobalCodeInfo, SharedLibraryInfo, ResourceInfo, LinkableGraph, TemplatePlaceholderInfo, JavaLibraryIntellijInfo): first_order_classpath_deps = filter(None, [x.get(JavaLibraryInfo) for x in declared_deps + exported_deps + runtime_deps]) first_order_classpath_libs = [dep.output_for_classpath_macro for dep in first_order_classpath_deps] compiling_deps = derive_compiling_deps(ctx.actions, None, declared_deps + exported_deps + provided_deps + exported_provided_deps) + first_order_compiling_deps = derive_compiling_deps(ctx.actions, library_output, declared_deps + exported_deps + provided_deps + exported_provided_deps) if library_output else compiling_deps compiling_classpath = [dep.full_library for dep in (list(compiling_deps.traverse()) if compiling_deps else [])] desugar_classpath = compiling_classpath if needs_desugar else [] - library_info, packaging_info, shared_library_info, cxx_resource_info, linkable_graph = _create_non_template_providers( + library_info, packaging_info, global_code_info, shared_library_info, cxx_resource_info, linkable_graph = _create_non_template_providers( ctx, library_output = library_output, + global_code_config = global_code_config, declared_deps = declared_deps, exported_deps = exported_deps, exported_provided_deps = exported_provided_deps, @@ -481,8 +640,10 @@ def create_java_library_providers( desugar_classpath = desugar_classpath, is_prebuilt_jar = is_prebuilt_jar, 
has_srcs = has_srcs, + sources_jar = sources_jar, proguard_config = proguard_config, gwt_module = gwt_module, + first_order_compiling_deps = first_order_compiling_deps, ) first_order_libs = first_order_classpath_libs + [library_info.library_output.full_library] if library_info.library_output else first_order_classpath_libs @@ -492,6 +653,8 @@ def create_java_library_providers( compiling_classpath = compiling_classpath, generated_sources = generated_sources, annotation_jars_dir = annotation_jars_dir, + lint_jar = lint_jar, + preprocessed_library = preprocessed_library, ) - return (library_info, packaging_info, shared_library_info, cxx_resource_info, linkable_graph, template_info, intellij_info) + return (library_info, packaging_info, global_code_info, shared_library_info, cxx_resource_info, linkable_graph, template_info, intellij_info) diff --git a/prelude/java/java_resources.bzl b/prelude/java/java_resources.bzl index 4916d589187..fbab61e4cb4 100644 --- a/prelude/java/java_resources.bzl +++ b/prelude/java/java_resources.bzl @@ -57,3 +57,18 @@ def get_resources_map( resource_name = get_src_package(java_toolchain.src_root_prefixes, java_toolchain.src_root_elements, full_resource) resources_to_copy[resource_name] = resource return resources_to_copy + +def parse_src_roots(src_roots: list[str]) -> (list[str], list[str]): + prefixes = [] + elements = [] + for src_root in src_roots: + if src_root.startswith("/"): + if not src_root.endswith("/"): + fail("Elements in java.src_roots config that begin with a / must end in one too, but {} does not".format(src_root)) + prefixes.append(src_root[1:]) + elif "/" in src_root: + fail("No / is permitted in java.src_roots config elements, but {} has one".format(src_root)) + else: + elements.append(src_root) + + return elements, prefixes diff --git a/prelude/java/java_test.bzl b/prelude/java/java_test.bzl index ba671891695..0e133c67296 100644 --- a/prelude/java/java_test.bzl +++ b/prelude/java/java_test.bzl @@ -14,11 +14,18 @@ load("@prelude//java:java_library.bzl", "build_java_library") load("@prelude//java:java_providers.bzl", "JavaLibraryInfo", "JavaPackagingInfo", "get_all_java_packaging_deps_tset") load("@prelude//java:java_toolchain.bzl", "JavaTestToolchainInfo", "JavaToolchainInfo") load("@prelude//java/utils:java_more_utils.bzl", "get_path_separator_for_exec_os") -load("@prelude//linking:shared_libraries.bzl", "SharedLibraryInfo", "merge_shared_libraries", "traverse_shared_library_info") +load( + "@prelude//linking:shared_libraries.bzl", + "SharedLibraryInfo", + "create_shlib_symlink_tree", + "merge_shared_libraries", + "traverse_shared_library_info", +) load( "@prelude//tests:re_utils.bzl", "get_re_executors_from_props", ) +load("@prelude//utils:argfile.bzl", "at_argfile") load("@prelude//utils:expect.bzl", "expect") load("@prelude//test/inject_test_run_info.bzl", "inject_test_run_info") @@ -46,11 +53,12 @@ def build_junit_test( extra_cmds: list = [], extra_classpath_entries: list[Artifact] = []) -> ExternalRunnerTestInfo: java_test_toolchain = ctx.attrs._java_test_toolchain[JavaTestToolchainInfo] + java_toolchain = ctx.attrs._java_toolchain[JavaToolchainInfo] - java = ctx.attrs.java[RunInfo] if ctx.attrs.java else ctx.attrs._java_toolchain[JavaToolchainInfo].java_for_tests + java = ctx.attrs.java[RunInfo] if ctx.attrs.java else java_toolchain.java_for_tests cmd = [java] + extra_cmds + ctx.attrs.vm_args + ["-XX:-MaxFDLimit"] - if len(java_test_toolchain.jvm_args) > 0: + if java_test_toolchain.jvm_args: cmd.extend(java_test_toolchain.jvm_args) 
classpath = [] @@ -60,6 +68,8 @@ def build_junit_test( cmd.extend(java_test_toolchain.java_custom_class_loader_vm_args) classpath.append(java_test_toolchain.java_custom_class_loader_library_jar) + cmd.append(cmd_args(ctx.attrs.java_agents, format = "-javaagent:{}")) + classpath.extend( [java_test_toolchain.test_runner_library_jar] + [ @@ -78,14 +88,12 @@ def build_junit_test( re_executor, executor_overrides = get_re_executors_from_props(ctx) # We implicitly make the target run from the project root if remote - # excution options were specified. + # execution options were specified. run_from_cell_root = "buck2_run_from_cell_root" in labels uses_java8 = "run_with_java8" in labels - classpath_args = cmd_args() - if run_from_cell_root: - classpath_args.relative_to(ctx.label.cell_root) + relative_to = {"relative_to": ctx.label.cell_root} if run_from_cell_root else {} if uses_java8: # Java 8 does not support using argfiles, and these tests can have huge classpaths so we need another @@ -94,16 +102,25 @@ def build_junit_test( # to the "FileClassPathRunner" as a system variable. The "FileClassPathRunner" then loads all the jars # from that file onto the classpath, and delegates running the test to the junit test runner. cmd.extend(["-classpath", cmd_args(java_test_toolchain.test_runner_library_jar)]) - classpath_args.add(cmd_args(classpath)) + classpath_args = cmd_args( + cmd_args(classpath), + **relative_to + ) classpath_args_file = ctx.actions.write("classpath_args_file", classpath_args) - cmd.append(cmd_args(classpath_args_file, format = "-Dbuck.classpath_file={}").hidden(classpath_args)) + cmd.append(cmd_args( + classpath_args_file, + format = "-Dbuck.classpath_file={}", + hidden = classpath_args, + )) else: # Java 9+ supports argfiles, so just write the classpath to an argsfile. "FileClassPathRunner" will delegate # immediately to the junit test runner. 
- classpath_args.add("-classpath") - classpath_args.add(cmd_args(classpath, delimiter = get_path_separator_for_exec_os(ctx))) - classpath_args_file = ctx.actions.write("classpath_args_file", classpath_args) - cmd.append(cmd_args(classpath_args_file, format = "@{}").hidden(classpath_args)) + classpath_args = cmd_args( + "-classpath", + cmd_args(classpath, delimiter = get_path_separator_for_exec_os(ctx)), + **relative_to + ) + cmd.append(at_argfile(actions = ctx.actions, name = "classpath_args_file", args = classpath_args)) if (ctx.attrs.test_type == "junit5"): cmd.extend(java_test_toolchain.junit5_test_runner_main_class_args) @@ -113,7 +130,7 @@ def build_junit_test( cmd.extend(java_test_toolchain.junit_test_runner_main_class_args) if ctx.attrs.test_case_timeout_ms: - cmd.extend(["--default_test_timeout", str(ctx.attrs.test_case_timeout_ms)]) + cmd.extend(["--default-test-timeout", str(ctx.attrs.test_case_timeout_ms)]) if ctx.attrs.test_class_names_file: class_names = ctx.attrs.test_class_names_file @@ -128,7 +145,7 @@ def build_junit_test( ctx.actions.write("sources.txt", ctx.attrs.srcs), "--output", class_names.as_output(), - ]).hidden(ctx.attrs.srcs) + ], hidden = ctx.attrs.srcs) ctx.actions.run(list_class_names_cmd, category = "list_class_names") cmd.extend(["--test-class-names-file", class_names]) @@ -145,12 +162,12 @@ def build_junit_test( transitive_class_to_src_map = merge_class_to_source_map_from_jar( actions = ctx.actions, name = ctx.label.name + ".transitive_class_to_src.json", - java_test_toolchain = java_test_toolchain, + java_toolchain = java_toolchain, relative_to = ctx.label.cell_root if run_from_cell_root else None, deps = [tests_class_to_source_info], ) if run_from_cell_root: - transitive_class_to_src_map = cmd_args(transitive_class_to_src_map).relative_to(ctx.label.cell_root) + transitive_class_to_src_map = cmd_args(transitive_class_to_src_map, relative_to = ctx.label.cell_root) env["JACOCO_CLASSNAME_SOURCE_MAP"] = transitive_class_to_src_map test_info = ExternalRunnerTestInfo( @@ -180,8 +197,10 @@ def _get_native_libs_env(ctx: AnalysisContext) -> dict: deps = shared_library_infos, ) - native_linkables = traverse_shared_library_info(shared_library_info) - cxx_library_symlink_tree_dict = {so_name: shared_lib.lib.output for so_name, shared_lib in native_linkables.items()} - cxx_library_symlink_tree = ctx.actions.symlinked_dir("cxx_library_symlink_tree", cxx_library_symlink_tree_dict) + cxx_library_symlink_tree = create_shlib_symlink_tree( + actions = ctx.actions, + out = "cxx_library_symlink_tree", + shared_libs = traverse_shared_library_info(shared_library_info), + ) return {"BUCK_LD_SYMLINK_TREE": cxx_library_symlink_tree} diff --git a/prelude/java/java_toolchain.bzl b/prelude/java/java_toolchain.bzl index 82ab7517fc5..1c06a9a5589 100644 --- a/prelude/java/java_toolchain.bzl +++ b/prelude/java/java_toolchain.bzl @@ -28,11 +28,14 @@ JavaToolchainInfo = provider( "class_abi_generator": provider_field(typing.Any, default = None), "class_loader_bootstrapper": provider_field(typing.Any, default = None), "compile_and_package": provider_field(typing.Any, default = None), + "cp_snapshot_generator": provider_field(typing.Any, default = None), "dep_files": provider_field(typing.Any, default = None), "fat_jar": provider_field(typing.Any, default = None), "fat_jar_main_class_lib": provider_field(typing.Any, default = None), "gen_class_to_source_map": provider_field(typing.Any, default = None), "gen_class_to_source_map_debuginfo": provider_field(typing.Any, default = None), # optional 
+ "gen_class_to_source_map_include_sourceless_compiled_packages": provider_field(typing.Any, default = None), + "global_code_config": provider_field(typing.Any, default = None), "graalvm_java": provider_field(typing.Any, default = None), "is_bootstrap_toolchain": provider_field(typing.Any, default = None), "jar": provider_field(typing.Any, default = None), @@ -48,6 +51,7 @@ JavaToolchainInfo = provider( "javacd_jvm_args_target": provider_field(typing.Any, default = None), "javacd_main_class": provider_field(typing.Any, default = None), "javacd_worker": provider_field(typing.Any, default = None), + "merge_class_to_source_maps": provider_field(typing.Any, default = None), "nullsafe": provider_field(typing.Any, default = None), "nullsafe_extra_args": provider_field(typing.Any, default = None), "nullsafe_signatures": provider_field(typing.Any, default = None), @@ -71,7 +75,6 @@ JavaTestToolchainInfo = provider( "junit_test_runner_main_class_args": provider_field(typing.Any, default = None), "jvm_args": provider_field(typing.Any, default = None), "list_class_names": provider_field(typing.Any, default = None), - "merge_class_to_source_maps": provider_field(typing.Any, default = None), "test_runner_library_jar": provider_field(typing.Any, default = None), "testng_test_runner_main_class_args": provider_field(typing.Any, default = None), "use_java_custom_class_loader": provider_field(typing.Any, default = None), @@ -85,6 +88,9 @@ PrebuiltJarToolchainInfo = provider( doc = "prebuilt_jar toolchain info", fields = { "class_abi_generator": provider_field(typing.Any, default = None), + "cp_snapshot_generator": provider_field(typing.Any, default = None), + "global_code_config": provider_field(typing.Any, default = None), "is_bootstrap_toolchain": provider_field(typing.Any, default = None), + "java": provider_field(typing.Any, default = None), }, ) diff --git a/prelude/java/javacd_jar_creator.bzl b/prelude/java/javacd_jar_creator.bzl index 8b8e582ce9a..5592c072627 100644 --- a/prelude/java/javacd_jar_creator.bzl +++ b/prelude/java/javacd_jar_creator.bzl @@ -9,6 +9,7 @@ load( "@prelude//java:java_providers.bzl", "JavaClasspathEntry", # @unused Used as a type "JavaCompileOutputs", # @unused Used as a type + "generate_java_classpath_snapshot", "make_compile_outputs", ) load("@prelude//java:java_resources.bzl", "get_resources_map") @@ -31,7 +32,6 @@ load( "OutputPaths", "TargetType", "add_java_7_8_bootclasspath", - "add_output_paths_to_cmd_args", "base_qualified_name", "declare_prefixed_output", "define_output_paths", @@ -40,13 +40,14 @@ load( "generate_abi_jars", "get_abi_generation_mode", "get_compiling_deps_tset", + "output_paths_to_hidden_cmd_args", "prepare_cd_exe", "prepare_final_jar", "setup_dep_files", ) load("@prelude//utils:expect.bzl", "expect") -base_command_params = struct( +_base_command_params = struct( withDownwardApi = True, spoolMode = "DIRECT_TO_JAR", ) @@ -57,13 +58,13 @@ def create_jar_artifact_javacd( abi_generation_mode: [AbiGenerationMode, None], java_toolchain: JavaToolchainInfo, label, - output: [Artifact, None], + output: Artifact | None, javac_tool: [typing.Any, None], srcs: list[Artifact], remove_classes: list[str], resources: list[Artifact], resources_root: [str, None], - manifest_file: [Artifact, None], + manifest_file: Artifact | None, annotation_processor_properties: AnnotationProcessorProperties, plugin_params: [PluginParams, None], source_level: int, @@ -73,10 +74,11 @@ def create_jar_artifact_javacd( source_only_abi_deps: list[Dependency], extra_arguments: cmd_args, 
additional_classpath_entries: list[Artifact], - additional_compiled_srcs: [Artifact, None], + additional_compiled_srcs: Artifact | None, bootclasspath_entries: list[Artifact], is_building_android_binary: bool, - is_creating_subtarget: bool = False) -> JavaCompileOutputs: + is_creating_subtarget: bool = False, + debug_port: [int, None] = None) -> JavaCompileOutputs: if javac_tool != None: # TODO(cjhopman): We can probably handle this better. I think we should be able to just use the non-javacd path. fail("cannot set explicit javac on library when using javacd") @@ -136,7 +138,7 @@ def create_jar_artifact_javacd( ) return struct( - baseCommandParams = base_command_params, + _baseCommandParams = _base_command_params, libraryJarCommand = struct( baseJarCommand = base_jar_command, libraryJarBaseCommand = struct( @@ -182,7 +184,7 @@ def create_jar_artifact_javacd( ) return struct( - baseCommandParams = base_command_params, + _baseCommandParams = _base_command_params, abiJarCommand = abi_command, ) @@ -194,9 +196,9 @@ def create_jar_artifact_javacd( qualified_name: str, output_paths: OutputPaths, classpath_jars_tag: ArtifactTag, - abi_dir: [Artifact, None], + abi_dir: Artifact | None, target_type: TargetType, - path_to_class_hashes: [Artifact, None], + path_to_class_hashes: Artifact | None, is_creating_subtarget: bool = False, source_only_abi_compiling_deps: list[JavaClasspathEntry] = []): proto = declare_prefixed_output(actions, actions_identifier, "jar_command.proto.json") @@ -213,8 +215,9 @@ def create_jar_artifact_javacd( compiler = compiler, main_class = java_toolchain.javacd_main_class, worker = java_toolchain.javacd_worker[WorkerInfo], - debug_port = java_toolchain.javacd_debug_port, - debug_target = java_toolchain.javacd_debug_target, + target_specified_debug_port = debug_port, + toolchain_specified_debug_port = java_toolchain.javacd_debug_port, + toolchain_specified_debug_target = java_toolchain.javacd_debug_target, extra_jvm_args = java_toolchain.javacd_jvm_args, extra_jvm_args_target = java_toolchain.javacd_jvm_args_target, ) @@ -244,10 +247,7 @@ def create_jar_artifact_javacd( abi_dir.as_output(), ) - args = add_output_paths_to_cmd_args(args, output_paths, path_to_class_hashes) - - # TODO(cjhopman): make sure this works both locally and remote. 
- event_pipe_out = declare_prefixed_output(actions, actions_identifier, "events.data") + args.add(output_paths_to_hidden_cmd_args(output_paths, path_to_class_hashes)) dep_files = {} if not is_creating_subtarget and srcs and (java_toolchain.dep_files == DepFiles("per_jar") or java_toolchain.dep_files == DepFiles("per_class")) and track_class_usage: @@ -277,12 +277,12 @@ def create_jar_artifact_javacd( args, env = { "BUCK_CLASSPATH": compiler, - "BUCK_EVENT_PIPE": event_pipe_out.as_output(), "JAVACD_ABSOLUTE_PATHS_ARE_RELATIVE_TO_CWD": "1", }, category = "{}javacd_jar".format(category_prefix), identifier = actions_identifier or "", dep_files = dep_files, + allow_dep_file_cache_upload = False, exe = exe, local_only = local_only, low_pass_filter = False, @@ -305,7 +305,7 @@ def create_jar_artifact_javacd( is_creating_subtarget, ) jar_postprocessor = ctx.attrs.jar_postprocessor[RunInfo] if hasattr(ctx.attrs, "jar_postprocessor") and ctx.attrs.jar_postprocessor else None - final_jar = prepare_final_jar( + final_jar_output = prepare_final_jar( actions = actions, actions_identifier = actions_identifier, output = output, @@ -324,7 +324,7 @@ def create_jar_artifact_javacd( additional_compiled_srcs, is_building_android_binary, java_toolchain.class_abi_generator, - final_jar, + final_jar_output.final_jar, compiling_deps_tset, source_only_abi_deps, class_abi_jar = class_abi_jar, @@ -333,8 +333,10 @@ def create_jar_artifact_javacd( define_action = define_javacd_action, ) + abi_jar_snapshot = generate_java_classpath_snapshot(ctx.actions, java_toolchain.cp_snapshot_generator, classpath_abi, actions_identifier) result = make_compile_outputs( - full_library = final_jar, + full_library = final_jar_output.final_jar, + preprocessed_library = final_jar_output.preprocessed_jar, class_abi = class_abi, source_abi = source_abi, source_only_abi = source_only_abi, @@ -342,11 +344,15 @@ def create_jar_artifact_javacd( classpath_abi_dir = classpath_abi_dir, required_for_source_only_abi = required_for_source_only_abi, annotation_processor_output = output_paths.annotations, + abi_jar_snapshot = abi_jar_snapshot, ) else: + full_jar_snapshot = generate_java_classpath_snapshot(ctx.actions, java_toolchain.cp_snapshot_generator, final_jar_output.final_jar, actions_identifier) result = make_compile_outputs( - full_library = final_jar, + full_library = final_jar_output.final_jar, + preprocessed_library = final_jar_output.preprocessed_jar, required_for_source_only_abi = required_for_source_only_abi, annotation_processor_output = output_paths.annotations, + abi_jar_snapshot = full_jar_snapshot, ) return result diff --git a/prelude/java/plugins/java_annotation_processor.bzl b/prelude/java/plugins/java_annotation_processor.bzl index a674922b47b..ca8545453da 100644 --- a/prelude/java/plugins/java_annotation_processor.bzl +++ b/prelude/java/plugins/java_annotation_processor.bzl @@ -6,6 +6,7 @@ # of this source tree. 
load("@prelude//java:java_providers.bzl", "JavaLibraryInfo", "JavaPackagingDepTSet", "JavaPackagingInfo") +load("@prelude//utils:type_defs.bzl", "is_tuple") JavaProcessorsType = enum( "java_annotation_processor", @@ -57,7 +58,7 @@ def derive_transitive_deps(ctx: AnalysisContext, deps: list[Dependency]) -> [Jav def create_annotation_processor_properties( ctx: AnalysisContext, - plugins: list[Dependency], + plugins: list[[Dependency, (Dependency, list[str])]], annotation_processor_names: list[str], annotation_processor_params: list[str], annotation_processor_deps: list[Dependency]) -> AnnotationProcessorProperties: @@ -65,7 +66,7 @@ def create_annotation_processor_properties( # Extend `ap_processor_deps` with java deps from `annotation_processor_deps` if annotation_processor_names or annotation_processor_deps: - for ap_dep in [x.get(JavaLibraryInfo) for x in annotation_processor_deps]: + for ap_dep in [_get_plugin_provider(x, JavaLibraryInfo) for x in annotation_processor_deps]: if not ap_dep: fail("Dependency must have a type of `java_library` or `prebuilt_jar`. Deps: {}".format(annotation_processor_deps)) @@ -80,7 +81,7 @@ def create_annotation_processor_properties( )) # APs derived from `plugins` attribute - for ap_plugin in filter(None, [x.get(JavaProcessorsInfo) for x in plugins]): + for ap_plugin in filter(None, [_get_plugin_provider(x, JavaProcessorsInfo) for x in plugins]): if not ap_plugin: fail("Plugin must have a type of `java_annotation_processor` or `java_plugin`. Plugins: {}".format(plugins)) if ap_plugin.type == JavaProcessorsType("java_annotation_processor"): @@ -97,30 +98,24 @@ def create_annotation_processor_properties( annotation_processor_params = annotation_processor_params, ) -def create_ksp_annotation_processor_properties(ctx: AnalysisContext, plugins: list[Dependency]) -> AnnotationProcessorProperties: - ap_processors = [] - ap_processor_deps = [] +def create_ksp_annotation_processor_properties(plugins: list[[Dependency, (Dependency, list[str])]]) -> AnnotationProcessorProperties: + annotation_processors = [] # APs derived from `plugins` attribute - for ap_plugin in filter(None, [x.get(JavaProcessorsInfo) for x in plugins]): + for ap_plugin in filter(None, [_get_plugin_provider(x, JavaProcessorsInfo) for x in plugins]): if not ap_plugin: fail("Plugin must have a type of `java_annotation_processor` or `java_plugin`. 
Plugins: {}".format(plugins)) if ap_plugin.type == JavaProcessorsType("ksp_annotation_processor"): - ap_processors += ap_plugin.processors - if ap_plugin.deps: - ap_processor_deps.append(ap_plugin.deps) - - if not ap_processors: - return AnnotationProcessorProperties(annotation_processors = [], annotation_processor_params = []) + annotation_processors.append(AnnotationProcessor( + affects_abi = ap_plugin.affects_abi, + supports_source_only_abi = ap_plugin.supports_source_only_abi, + processors = ap_plugin.processors, + deps = ap_plugin.deps, + isolate_class_loader = ap_plugin.isolate_class_loader, + )) return AnnotationProcessorProperties( - annotation_processors = [AnnotationProcessor( - processors = dedupe(ap_processors), - deps = ctx.actions.tset(JavaPackagingDepTSet, children = ap_processor_deps) if ap_processor_deps else None, - affects_abi = True, - supports_source_only_abi = False, - isolate_class_loader = False, - )], + annotation_processors = annotation_processors, annotation_processor_params = [], ) @@ -130,6 +125,9 @@ def _get_processor_type(processor_class: str) -> JavaProcessorsType: return JavaProcessorsType("java_annotation_processor") +def _get_plugin_provider(plugin: [Dependency, (Dependency, list[str])], provider: typing.Callable[[], Provider]) -> [Provider, None]: + return (plugin[0] if is_tuple(plugin) else plugin).get(provider) + def java_annotation_processor_impl(ctx: AnalysisContext) -> list[Provider]: if ctx.attrs._build_only_native_code: return [DefaultInfo()] diff --git a/prelude/java/plugins/java_plugin.bzl b/prelude/java/plugins/java_plugin.bzl index 2636e137a23..97236581741 100644 --- a/prelude/java/plugins/java_plugin.bzl +++ b/prelude/java/plugins/java_plugin.bzl @@ -12,25 +12,27 @@ load( "JavaProcessorsType", "derive_transitive_deps", ) +load("@prelude//utils:type_defs.bzl", "is_tuple") PluginParams = record( processors = field(list[(str, cmd_args)]), deps = field([JavaPackagingDepTSet, None]), ) -def create_plugin_params(ctx: AnalysisContext, plugins: list[Dependency]) -> [PluginParams, None]: +def create_plugin_params(ctx: AnalysisContext, plugins: list[[Dependency, (Dependency, list[str])]]) -> [PluginParams, None]: processors = [] plugin_deps = [] - # _wip_java_plugin_arguments keys are providers_label, map to - # target_label to allow lookup with plugin.label.raw_target() - plugin_arguments = { - label.raw_target(): arguments - for label, arguments in ctx.attrs._wip_java_plugin_arguments.items() - } - # Compiler plugin derived from `plugins` attribute - for plugin in plugins: + for item in plugins: + # Each plugin can be either a tuple of (target, arguments) or just the target + if is_tuple(item): + plugin = item[0] + arguments = item[1] + else: + plugin = item + arguments = None + processors_info = plugin.get(JavaProcessorsInfo) if processors_info != None and processors_info.type == JavaProcessorsType("plugin"): if len(processors_info.processors) > 1: @@ -39,7 +41,6 @@ def create_plugin_params(ctx: AnalysisContext, plugins: list[Dependency]) -> [Pl if processors_info.deps: plugin_deps.append(processors_info.deps) - arguments = plugin_arguments.get(plugin.label.raw_target()) processors.append((processor, cmd_args(arguments) if arguments != None else cmd_args())) if not processors: diff --git a/prelude/java/prebuilt_jar.bzl b/prelude/java/prebuilt_jar.bzl index b55cec88888..d1b4ae80a60 100644 --- a/prelude/java/prebuilt_jar.bzl +++ b/prelude/java/prebuilt_jar.bzl @@ -12,6 +12,7 @@ load( "JavaClasspathEntry", "create_abi", "create_java_library_providers", 
+ "generate_java_classpath_snapshot", ) load(":java_toolchain.bzl", "PrebuiltJarToolchainInfo") @@ -42,27 +43,31 @@ def prebuilt_jar_impl(ctx: AnalysisContext) -> list[Provider]: ctx.actions.copy_file(gwt_output, ctx.attrs.source_jar if ctx.attrs.source_jar else ctx.attrs.binary_jar) abi = None - if ctx.attrs.generate_abi: - prebuilt_jar_toolchain = ctx.attrs._prebuilt_jar_toolchain[PrebuiltJarToolchainInfo] - if not prebuilt_jar_toolchain.is_bootstrap_toolchain: + prebuilt_jar_toolchain = ctx.attrs._prebuilt_jar_toolchain[PrebuiltJarToolchainInfo] + if not prebuilt_jar_toolchain.is_bootstrap_toolchain: + if ctx.attrs.generate_abi: abi = create_abi(ctx.actions, prebuilt_jar_toolchain.class_abi_generator, output) + jar_snapshot = generate_java_classpath_snapshot(ctx.actions, ctx.attrs._prebuilt_jar_toolchain[PrebuiltJarToolchainInfo].cp_snapshot_generator, abi or output, "") library_output_classpath_entry = JavaClasspathEntry( full_library = output, abi = abi or output, abi_as_dir = None, required_for_source_only_abi = ctx.attrs.required_for_source_only_abi, + abi_jar_snapshot = jar_snapshot, ) - java_library_info, java_packaging_info, shared_library_info, cxx_resource_info, linkable_graph, template_placeholder_info, _ = create_java_library_providers( + java_library_info, java_packaging_info, global_code_info, shared_library_info, cxx_resource_info, linkable_graph, template_placeholder_info, _ = create_java_library_providers( ctx, library_output = library_output_classpath_entry, + global_code_config = prebuilt_jar_toolchain.global_code_config, declared_deps = ctx.attrs.deps, exported_deps = ctx.attrs.deps, provided_deps = ctx.attrs.desugar_deps, needs_desugar = True, is_prebuilt_jar = True, gwt_module = gwt_output, + sources_jar = ctx.attrs.source_jar, ) # TODO(T107163344) this shouldn't be in prebuilt_jar itself, use overlays to remove it. 
@@ -78,10 +83,15 @@ def prebuilt_jar_impl(ctx: AnalysisContext) -> list[Provider]: return [ java_library_info, java_packaging_info, + global_code_info, shared_library_info, cxx_resource_info, android_packageable_info, template_placeholder_info, linkable_graph, DefaultInfo(default_output = output, sub_targets = sub_targets), - ] + ] + ( + [ + RunInfo(args = cmd_args([ctx.attrs._prebuilt_jar_toolchain[PrebuiltJarToolchainInfo].java[RunInfo], "-jar", output])), + ] if ctx.attrs.is_executable else [] + ) diff --git a/prelude/java/tools/BUCK.v2 b/prelude/java/tools/BUCK.v2 index ac3c111fc8f..f47f01a54a4 100644 --- a/prelude/java/tools/BUCK.v2 +++ b/prelude/java/tools/BUCK.v2 @@ -1,3 +1,9 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + prelude = native prelude.python_bootstrap_binary( @@ -77,7 +83,7 @@ prelude.python_bootstrap_library( ], visibility = [ "prelude//android/tools/...", - "prelude//kotlin/tools/...", "prelude//java/tools/...", + "prelude//kotlin/tools/...", ], ) diff --git a/prelude/java/tools/fat_jar.py b/prelude/java/tools/fat_jar.py index 7c3cd22a403..3c9f9b52c03 100644 --- a/prelude/java/tools/fat_jar.py +++ b/prelude/java/tools/fat_jar.py @@ -14,7 +14,7 @@ import zipfile from shutil import copy, copytree from tempfile import TemporaryDirectory -from typing import Dict, List +from typing import Optional import utils @@ -30,6 +30,12 @@ def _parse_args(): required=True, help="tool for building jars", ) + parser.add_argument( + "--zip_scrubber_tool", + type=str, + required=True, + help="tool for scrubbing jars", + ) parser.add_argument( "--output", type=pathlib.Path, required=True, help="a path to an output result" ) @@ -103,19 +109,45 @@ def _parse_args(): action="store_true", help="Whether to create an inner jar if native libraries are present.", ) + parser.add_argument( + "--append_jar", + required=False, + type=pathlib.Path, + help="path to a jar used as the base of the new jar, to which new files will be added", + ) return parser.parse_args() -def _merge_dictionaries(dict1: Dict[str, str], dict2: Dict[str, str]) -> Dict[str, str]: - return {**dict1, **dict2} - - -def _shlex_split(cmd: str) -> List[str]: - if platform.system() == "Windows": - return cmd.split() - else: - return shlex.split(cmd) +def _fat_jar( + jar_builder_tool: str, + output_path: str, + append_jar: Optional[str] = None, + main_class: Optional[str] = None, + entries_to_jar_file: Optional[str] = None, + override_entries_to_jar_file: Optional[str] = None, + manifest_file: Optional[str] = None, + blocklist_file: Optional[str] = None, +) -> None: + cmd = [] + cmd.extend(utils.shlex_split(jar_builder_tool)) + if append_jar: + cmd.extend(["--append-jar", append_jar]) + if main_class: + cmd.extend(["--main-class", main_class]) + if entries_to_jar_file: + cmd.extend(["--entries-to-jar", entries_to_jar_file]) + if override_entries_to_jar_file: + cmd.extend(["--override-entries-to-jar", override_entries_to_jar_file]) + if manifest_file: + cmd.extend(["--manifest-file", manifest_file]) + if blocklist_file: + cmd.extend(["--blocklist-patterns", blocklist_file]) + cmd.extend(["--blocklist-patterns-matcher", "substring"]) + cmd.append("--merge-manifests") + cmd.extend(["--output", output_path]) + utils.log_message("fat_jar_cmd: {}".format(cmd)) + utils.execute_command(cmd) # Reads a list of files from native_libs_file and symlinks each as files in native_libs_dir.
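# A hedged sketch of the invocation that _fat_jar above assembles; the tool string and file names are hypothetical, only the flags come from the code.
# With jar_builder_tool = "java -jar jar_builder.jar", main_class = "com.example.Main", entries_to_jar_file = "entries.txt" and output_path = "out.jar", it executes:
#   java -jar jar_builder.jar --main-class com.example.Main --entries-to-jar entries.txt --merge-manifests --output out.jar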
@@ -138,12 +170,14 @@ def main(): args = _parse_args() jar_builder_tool = args.jar_builder_tool + zip_scrubber_tool = args.zip_scrubber_tool output_path = args.output jars_file = args.jars_file main_class = args.main_class manifest = args.manifest blocklist_file = args.blocklist meta_inf_directory = args.meta_inf_directory + append_jar = args.append_jar generate_wrapper = args.generate_wrapper classpath_args_output = args.classpath_args_output @@ -184,6 +218,8 @@ def main(): utils.log_message("classpath_args_output: {}".format(classpath_args_output)) utils.log_message("java_tool: {}".format(java_tool)) utils.log_message("script_marker_file_name: {}".format(script_marker_file_name)) + if append_jar: + utils.log_message("append_jar = {}".format(append_jar)) need_to_process_native_libs = native_libs_file is not None if need_to_process_native_libs and not do_not_create_inner_jar: @@ -251,8 +287,8 @@ def main(): else: # generate fat jar - jar_cmd = [] - jar_cmd.extend(utils.shlex_split(jar_builder_tool)) + entries_to_jar_file = jars_file + override_entries_to_jar = None if need_to_process_native_libs and do_not_create_inner_jar: # symlink native libs to `nativelibs` directory @@ -276,11 +312,7 @@ def main(): f.write(str(f2.read()) + "\n") f.write(str(native_libs_staging)) - jar_cmd.extend( - ["--entries-to-jar", jars_and_native_libs_directory_file] - ) - else: - jar_cmd.extend(["--entries-to-jar", jars_file]) + entries_to_jar_file = jars_and_native_libs_directory_file if meta_inf_directory: meta_inf_staging = pathlib.Path(temp_dir) / "meta_inf_staging" @@ -298,28 +330,26 @@ def main(): with open(meta_inf_directory_file, "w") as f: f.write(str(meta_inf_staging)) - jar_cmd.extend(["--override-entries-to-jar", meta_inf_directory_file]) - - if main_class: - jar_cmd.extend(["--main-class", main_class]) - - if blocklist_file: - jar_cmd.extend(["--blocklist-patterns", blocklist_file]) - jar_cmd.extend(["--blocklist-patterns-matcher", "substring"]) - - if manifest: - jar_cmd.extend(["--manifest-file", manifest]) - - jar_cmd.append("--merge-manifests") + override_entries_to_jar = meta_inf_directory_file jar_output = ( os.path.join(temp_dir, "inner.jar") if need_to_process_native_libs and not do_not_create_inner_jar else output_path ) - jar_cmd.extend(["--output", jar_output]) - utils.log_message("jar_cmd: {}".format(jar_cmd)) - utils.execute_command(jar_cmd) + + utils.log_message("jar_output: {}".format(jar_output)) + + _fat_jar( + jar_builder_tool=jar_builder_tool, + output_path=jar_output, + main_class=main_class, + entries_to_jar_file=entries_to_jar_file, + override_entries_to_jar_file=override_entries_to_jar, + manifest_file=manifest, + blocklist_file=blocklist_file, + append_jar=append_jar, + ) if need_to_process_native_libs and not do_not_create_inner_jar: fat_jar_content_dir = os.path.join(temp_dir, "fat_jar_content_dir") @@ -369,17 +399,21 @@ def main(): content.relative_to(fat_jar_content_dir), ) + zip_scrubber_cmd = [] + zip_scrubber_cmd.extend(utils.shlex_split(zip_scrubber_tool)) + zip_scrubber_cmd.extend([contents_zip_path]) + utils.execute_command(zip_scrubber_cmd) + entries_to_jar_file = os.path.join(temp_dir, "entries_to_jar.txt") with open(entries_to_jar_file, "w") as f: f.write("\n".join([contents_zip_path, str(fat_jar_lib)])) - fat_jar_cmd = [] - fat_jar_cmd.extend(utils.shlex_split(jar_builder_tool)) - fat_jar_cmd.extend(["--main-class", fat_jar_main_class]) - fat_jar_cmd.extend(["--output", output_path]) - fat_jar_cmd.extend(["--entries-to-jar", entries_to_jar_file]) - 
fat_jar_cmd.append("--merge-manifests") - utils.execute_command(fat_jar_cmd) + _fat_jar( + jar_builder_tool=jar_builder_tool, + output_path=output_path, + main_class=fat_jar_main_class, + entries_to_jar_file=entries_to_jar_file, + ) if __name__ == "__main__": diff --git a/prelude/java/tools/gen_class_to_source_map.py b/prelude/java/tools/gen_class_to_source_map.py index 8e58557d46f..f86d83cd1ee 100644 --- a/prelude/java/tools/gen_class_to_source_map.py +++ b/prelude/java/tools/gen_class_to_source_map.py @@ -12,11 +12,27 @@ import zipfile +def _base_class_name_matches_base_source_path( + base_class_name: str, base_source_path: str +): + return base_class_name == base_source_path or base_source_path.endswith( + "/" + base_class_name + ) + + def main(argv): parser = argparse.ArgumentParser(fromfile_prefix_chars="@") + parser.add_argument( + "--include_classes_prefixes", + "-i", + default=[], + nargs="*", + help="Prefixes of classes to include in the output, even if their source isn't present", + ) parser.add_argument( "--output", "-o", type=argparse.FileType("w"), default=sys.stdin ) + parser.add_argument("--sources_jar", required=False) parser.add_argument("jar") parser.add_argument("sources", nargs="*") args = parser.parse_args(argv[1:]) @@ -44,16 +60,55 @@ def main(argv): if "$" in base: continue + found = False for src_base, src_path in sources.items(): - if base == src_base or src_base.endswith("/" + base): + if _base_class_name_matches_base_source_path(base, src_base): classes.append( { "className": classname, "srcPath": src_path, } ) + found = True + break + # Kotlin creates .class files with a "Kt" suffix when code is written outside of a class, + # so strip that suffix and redo the comparison. + elif base.endswith("Kt") and _base_class_name_matches_base_source_path( + base[:-2], src_base + ): + classes.append( + { + "className": classname[:-2], + "srcPath": src_path, + } + ) + found = True break + if not found: + # If the class is not present in the sources, we still want to + # include it if it has a prefix that we are interested in. 
+ # Certain classes in "androidx.databinding.*" are generated, and it's useful to know that they are present in jars + for prefix in args.include_classes_prefixes: + if classname.startswith(prefix): + classes.append( + { + "className": classname, + } + ) + break + + if args.sources_jar: + with zipfile.ZipFile(args.sources_jar, "w") as sources_jar: + for d in classes: + if "srcPath" in d: + src_path = d["srcPath"] + class_name = d["className"] + _, src_path_ext = os.path.splitext(src_path) + sources_jar.write( + src_path, class_name.replace(".", "/") + src_path_ext + ) + json.dump( { "jarPath": args.jar, diff --git a/prelude/java/utils/java_utils.bzl b/prelude/java/utils/java_utils.bzl index 2e7a24534a3..476b685b3a8 100644 --- a/prelude/java/utils/java_utils.bzl +++ b/prelude/java/utils/java_utils.bzl @@ -117,20 +117,22 @@ def declare_prefixed_name(name: str, prefix: [str, None]) -> str: def get_class_to_source_map_info( ctx: AnalysisContext, outputs: [JavaCompileOutputs, None], - deps: list[Dependency]) -> (JavaClassToSourceMapInfo, dict): + deps: list[Dependency], + generate_sources_jar: bool = False) -> (JavaClassToSourceMapInfo, Artifact | None, dict): sub_targets = {} class_to_srcs = None class_to_srcs_debuginfo = None + sources_jar = None if outputs != None: name = ctx.label.name - if not ctx.attrs._is_building_android_binary: - class_to_srcs = create_class_to_source_map_from_jar( - actions = ctx.actions, - java_toolchain = ctx.attrs._java_toolchain[JavaToolchainInfo], - name = name + ".class_to_srcs.json", - jar = outputs.classpath_entry.full_library, - srcs = ctx.attrs.srcs, - ) + class_to_srcs, sources_jar = create_class_to_source_map_from_jar( + actions = ctx.actions, + java_toolchain = ctx.attrs._java_toolchain[JavaToolchainInfo], + name = name + ".class_to_srcs.json", + jar = outputs.classpath_entry.full_library, + srcs = ctx.attrs.srcs, + sources_jar_name = "{}-sources.jar".format(name) if generate_sources_jar else None, + ) class_to_srcs_debuginfo = maybe_create_class_to_source_map_debuginfo( actions = ctx.actions, java_toolchain = ctx.attrs._java_toolchain[JavaToolchainInfo], @@ -138,6 +140,8 @@ def get_class_to_source_map_info( srcs = ctx.attrs.srcs, ) sub_targets["class-to-srcs"] = [DefaultInfo(default_output = class_to_srcs)] + if sources_jar: + sub_targets["sources.jar"] = [DefaultInfo(default_output = sources_jar)] class_to_src_map_info = create_class_to_source_map_info( ctx = ctx, @@ -147,7 +151,7 @@ def get_class_to_source_map_info( ) if outputs != None: sub_targets["debuginfo"] = [DefaultInfo(default_output = class_to_src_map_info.debuginfo)] - return (class_to_src_map_info, sub_targets) + return (class_to_src_map_info, sources_jar, sub_targets) def get_classpath_subtarget(actions: AnalysisActions, packaging_info: JavaPackagingInfo) -> dict[str, list[Provider]]: proj = packaging_info.packaging_deps.project_as_args("full_jar_args") diff --git a/prelude/js/js.bzl b/prelude/js/js.bzl index e8ecc804600..37b1311e9f7 100644 --- a/prelude/js/js.bzl +++ b/prelude/js/js.bzl @@ -37,6 +37,13 @@ def _is_release(): "config//build_mode/constraints:release": True, }) +def _select_asset_dest_path_resolver(): + return select({ + "DEFAULT": None, + "fbsource//tools/build_defs/js/config:asset_dest_path_resolver_android": "android", + "fbsource//tools/build_defs/js/config:asset_dest_path_resolver_generic": "generic", + }) + implemented_rules = { "js_bundle": js_bundle_impl, "js_bundle_genrule": js_bundle_genrule_impl, @@ -68,6 +75,10 @@ extra_attributes = { }, "js_library": { "worker":
attrs.exec_dep(), + "_asset_dest_path_resolver": attrs.option( + attrs.string(), + default = _select_asset_dest_path_resolver(), + ), "_build_only_native_code": attrs.bool(default = is_build_only_native_code()), "_is_release": attrs.bool( default = _is_release(), diff --git a/prelude/js/js_bundle.bzl b/prelude/js/js_bundle.bzl index 2b25374ee08..e9062173143 100644 --- a/prelude/js/js_bundle.bzl +++ b/prelude/js/js_bundle.bzl @@ -49,10 +49,11 @@ def _build_dependencies_file( command_args_files = [command_args_file], identifier = transform_profile, category = "dependencies", - hidden_artifacts = [cmd_args([ + hidden_artifacts = [cmd_args( dependencies_file.as_output(), extra_data_args, - ]).add(transitive_js_library_outputs)], + transitive_js_library_outputs, + )], ) return dependencies_file @@ -110,13 +111,14 @@ def _build_js_bundle( command_args_files = [command_args_file], identifier = base_dir, category = job_args["command"], - hidden_artifacts = [cmd_args([ + hidden_artifacts = [cmd_args( bundle_dir_output.as_output(), assets_dir.as_output(), misc_dir_path.as_output(), source_map.as_output(), extra_data_args, - ]).add(transitive_js_library_outputs)], + transitive_js_library_outputs, + )], ) return JsBundleInfo( diff --git a/prelude/js/js_library.bzl b/prelude/js/js_library.bzl index 9490bb4cb78..8bbe0ad46be 100644 --- a/prelude/js/js_library.bzl +++ b/prelude/js/js_library.bzl @@ -107,16 +107,25 @@ def _build_library_files( flavors: list[str], js_files: list[Artifact]) -> Artifact: output_path = ctx.actions.declare_output("library-files-out/{}/library_files".format(transform_profile)) + + job_args = { + "command": "library-files", + "flavors": flavors, + "outputFilePath": output_path, + "platform": ctx.attrs._platform, + "release": ctx.attrs._is_release, + "sourceFilePaths": js_files, + } + + if ctx.attrs.extra_json: + job_args["extraData"] = cmd_args(ctx.attrs.extra_json, delimiter = "") + + if ctx.attrs._asset_dest_path_resolver: + job_args["assetDestPathResolver"] = ctx.attrs._asset_dest_path_resolver + command_args_file = ctx.actions.write_json( "library_files_{}_command_args".format(transform_profile), - { - "command": "library-files", - "flavors": flavors, - "outputFilePath": output_path, - "platform": ctx.attrs._platform, - "release": ctx.attrs._is_release, - "sourceFilePaths": js_files, - }, + job_args, ) run_worker_commands( diff --git a/prelude/js/js_providers.bzl b/prelude/js/js_providers.bzl index bc24622a4be..c5027eaf579 100644 --- a/prelude/js/js_providers.bzl +++ b/prelude/js/js_providers.bzl @@ -37,7 +37,7 @@ JsBundleInfo = provider( def get_transitive_outputs( actions: AnalysisActions, - value: [Artifact, None] = None, + value: Artifact | None = None, deps: list[JsLibraryInfo] = []) -> TransitiveOutputsTSet: kwargs = {} if value: diff --git a/prelude/js/js_utils.bzl b/prelude/js/js_utils.bzl index b52f2293218..d95f84e664c 100644 --- a/prelude/js/js_utils.bzl +++ b/prelude/js/js_utils.bzl @@ -10,6 +10,7 @@ load("@prelude//:worker_tool.bzl", "WorkerToolInfo") load("@prelude//apple:apple_resource_types.bzl", "AppleResourceDestination", "AppleResourceSpec") load("@prelude//apple:resource_groups.bzl", "ResourceGraphInfo", "create_resource_graph") # @unused `ResourceGraphInfo` used as a type load("@prelude//js:js_providers.bzl", "JsBundleInfo") +load("@prelude//utils:argfile.bzl", "at_argfile") load("@prelude//utils:expect.bzl", "expect") RAM_BUNDLE_TYPES = { @@ -142,17 +143,25 @@ def run_worker_commands( identifier: str, category: str, hidden_artifacts = [cmd_args]): - 
worker_args = cmd_args("--command-args-file", command_args_files) - worker_args.add("--command-args-file-extra-data-fixup-hack=true") - - worker_argsfile = ctx.actions.declare_output(paths.join(identifier, "worker_{}.argsfile".format(category))) - ctx.actions.write(worker_argsfile.as_output(), worker_args) + worker_args = cmd_args( + "--command-args-file", + command_args_files, + "--command-args-file-extra-data-fixup-hack=true", + ) worker_tool_info = worker_tool[WorkerToolInfo] - worker_command = worker_tool_info.command.copy() - worker_command.hidden(hidden_artifacts) - worker_command.hidden(command_args_files) - worker_command.add(cmd_args(worker_argsfile, format = "@{}")) + worker_command = cmd_args( + worker_tool_info.command.copy(), + at_argfile( + actions = ctx.actions, + name = paths.join(identifier, "{}.js_worker_argsfile".format(category)), + args = worker_args, + ), + hidden = [ + hidden_artifacts, + command_args_files, + ], + ) ctx.actions.run( worker_command, diff --git a/prelude/julia/julia.bzl b/prelude/julia/julia.bzl index 74e7f433a1f..4337bef7a60 100644 --- a/prelude/julia/julia.bzl +++ b/prelude/julia/julia.bzl @@ -5,6 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +load("@prelude//decls/common.bzl", "buck") load(":julia_binary.bzl", "julia_binary_impl") load(":julia_library.bzl", "julia_jll_library_impl", "julia_library_impl") load(":julia_test.bzl", "julia_test_impl") @@ -48,5 +49,5 @@ extra_attributes = { "srcs": attrs.list(attrs.source(), default = []), "_julia_toolchain": julia_toolchain(), # TODO: coverage - }, + } | buck.inject_test_env_arg(), } diff --git a/prelude/julia/julia_binary.bzl b/prelude/julia/julia_binary.bzl index c1cedffee21..d9441aa4112 100644 --- a/prelude/julia/julia_binary.bzl +++ b/prelude/julia/julia_binary.bzl @@ -5,7 +5,12 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
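Note (illustrative sketch, not part of this patch): the js_utils.bzl hunk above replaces the manual declare-output/write/`@`-format sequence with `at_argfile` from `@prelude//utils:argfile.bzl`. Assuming that helper, the before/after looks roughly like this; the `tool` and file names are hypothetical:

    # Before: three separate steps to route args through an argsfile.
    argsfile = ctx.actions.declare_output("worker.argsfile")
    ctx.actions.write(argsfile.as_output(), worker_args)
    cmd.add(cmd_args(argsfile, format = "@{}"))

    # After: one expression that declares, writes, and renders as @<path>.
    cmd = cmd_args(tool, at_argfile(
        actions = ctx.actions,
        name = "worker.argsfile",
        args = worker_args,
    ))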
diff --git a/prelude/julia/julia_binary.bzl b/prelude/julia/julia_binary.bzl
index c1cedffee21..d9441aa4112 100644
--- a/prelude/julia/julia_binary.bzl
+++ b/prelude/julia/julia_binary.bzl
@@ -5,7 +5,12 @@
 # License, Version 2.0 found in the LICENSE-APACHE file in the root directory
 # of this source tree.
 
-load("@prelude//linking:shared_libraries.bzl", "merge_shared_libraries", "traverse_shared_library_info")
+load(
+    "@prelude//linking:shared_libraries.bzl",
+    "create_shlib_symlink_tree",
+    "merge_shared_libraries",
+    "traverse_shared_library_info",
+)
 load("@prelude//utils:utils.bzl", "flatten")
 load(":julia_info.bzl", "JuliaLibraryInfo", "JuliaLibraryTSet", "JuliaToolchainInfo")
 
@@ -47,12 +52,13 @@ def build_jll_shlibs_mapping(ctx: AnalysisContext, json_info_file: Artifact):
         filter(None, [d.shared_library_info for d in deps]),
     ))
 
-    shared_libs_symlink_tree = ctx.actions.symlinked_dir(
-        "__shared_libs_symlink_tree__",
-        {name: shlib.lib.output for name, shlib in shlibs.items()},
+    shared_libs_symlink_tree = create_shlib_symlink_tree(
+        actions = ctx.actions,
+        out = "__shared_libs_symlink_tree__",
+        shared_libs = shlibs,
     )
 
-    shlib_label_to_soname = {shlib.label: name for name, shlib in shlibs.items()}
+    shlib_label_to_soname = {shlib.label: shlib.soname.ensure_str() for shlib in shlibs}
 
     # iterate through all the jll libraries
    json_info = []
@@ -64,8 +70,11 @@ def build_jll_shlibs_mapping(ctx: AnalysisContext, json_info_file: Artifact):
         # iterate through all the shlib dependencies for the current jll
         artifact_info = []
         for julia_name, label in jll.libs.items():
-            symlink_dir = cmd_args(shared_libs_symlink_tree, delimiter = "")
-            symlink_dir.relative_to(json_info_file)  # That cannot be produced by a tset projection
+            symlink_dir = cmd_args(
+                shared_libs_symlink_tree,
+                delimiter = "",
+                relative_to = json_info_file,  # That cannot be produced by a tset projection
+            )
             artifact_info.append((julia_name, symlink_dir, shlib_label_to_soname[label]))
         json_info.append((jll.name, jli.uuid, artifact_info))
 
@@ -100,19 +109,20 @@ def build_julia_command(ctx):
     """
     julia_toolchain = ctx.attrs._julia_toolchain[JuliaToolchainInfo]
 
-    # python processor
-    cmd = cmd_args([julia_toolchain.cmd_processor])
-
     # build out the symlink tree for libs
     symlink_dir = build_load_path_symtree(ctx)
-    cmd.hidden(symlink_dir)
 
     # build symdir for sources
     srcs_by_path = {f.short_path: f for f in ctx.attrs.srcs}
     srcs = ctx.actions.symlinked_dir("srcs_tree", srcs_by_path)
     if ctx.attrs.main not in srcs_by_path:
         fail("main should be in srcs!")
-    cmd.hidden(srcs)
+
+    # python processor
+    cmd = cmd_args(
+        [julia_toolchain.cmd_processor],
+        hidden = [symlink_dir] + [srcs],
+    )
 
     # prepare a json file to hold all the data the python preprocessor needs to
     # execute the julia interpreter.
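Note (illustrative sketch, not part of this patch): the julia and js hunks show the mechanical migration applied throughout this diff — `cmd_args` is no longer mutated after construction; hidden inputs and path relativization are passed as constructor arguments instead. A minimal sketch, with hypothetical `tool`, `dep_file`, and `out_file` values:

    # Before: build, then mutate (the APIs being removed).
    cmd = cmd_args([tool])
    cmd.hidden(dep_file)       # tracked as an input, never rendered
    cmd.relative_to(out_file)  # render paths relative to out_file

    # After: everything is declared up front.
    cmd = cmd_args(
        [tool],
        hidden = [dep_file],
        relative_to = out_file,
    )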
@@ -122,10 +132,10 @@ def build_julia_command(ctx):
         "env": julia_toolchain.env,
         "jll_mapping": build_jll_shlibs_mapping(ctx, json_info_file),
         "julia_args": ctx.attrs.julia_args,
-        "julia_binary": cmd_args(julia_toolchain.julia, delimiter = " ").relative_to(json_info_file),
+        "julia_binary": cmd_args(julia_toolchain.julia, delimiter = " ", relative_to = json_info_file),
         "julia_flags": ctx.attrs.julia_flags,
-        "lib_path": cmd_args(symlink_dir, delimiter = " ").relative_to(json_info_file),
-        "main": cmd_args(srcs.project(ctx.attrs.main), delimiter = " ").relative_to(json_info_file),
+        "lib_path": cmd_args(symlink_dir, delimiter = " ", relative_to = json_info_file),
+        "main": cmd_args(srcs.project(ctx.attrs.main), delimiter = " ", relative_to = json_info_file),
     }
 
     json_file_loc = ctx.actions.write_json(json_info_file, json_info_dict, with_inputs = True)
diff --git a/prelude/julia/tools/BUCK.v2 b/prelude/julia/tools/BUCK.v2
index 2f205a4bb0e..3867e739610 100644
--- a/prelude/julia/tools/BUCK.v2
+++ b/prelude/julia/tools/BUCK.v2
@@ -1,3 +1,9 @@
+load("@prelude//utils:source_listing.bzl", "source_listing")
+
+oncall("build_infra")
+
+source_listing()
+
 prelude = native
 
 prelude.python_bootstrap_binary(
diff --git a/prelude/jvm/cd_jar_creator_util.bzl b/prelude/jvm/cd_jar_creator_util.bzl
index c25aff5711c..67b8c6e3805 100644
--- a/prelude/jvm/cd_jar_creator_util.bzl
+++ b/prelude/jvm/cd_jar_creator_util.bzl
@@ -23,8 +23,7 @@ load("@prelude//java/utils:java_utils.bzl", "declare_prefixed_name")
 load("@prelude//utils:expect.bzl", "expect")
 
 def add_java_7_8_bootclasspath(target_level: int, bootclasspath_entries: list[Artifact], java_toolchain: JavaToolchainInfo) -> list[Artifact]:
-    if target_level == 7:
-        return bootclasspath_entries + java_toolchain.bootclasspath_7
+    # bootclasspath_7 is deprecated.
     if target_level == 8:
         return bootclasspath_entries + java_toolchain.bootclasspath_8
     return bootclasspath_entries
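Note (illustrative, not part of this patch): with the level-7 branch gone, only target level 8 still appends toolchain bootclasspath entries. A sketch of the resulting behavior, assuming a hypothetical `toolchain` value with `bootclasspath_8` set:

    add_java_7_8_bootclasspath(8, [rt_jar], toolchain)  # -> [rt_jar] + toolchain.bootclasspath_8
    add_java_7_8_bootclasspath(7, [rt_jar], toolchain)  # -> [rt_jar]; level 7 is no longer special-cased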
@@ -97,7 +96,6 @@ OutputPaths = record(
     jar = Artifact,
     classes = Artifact,
     annotations = Artifact,
-    scratch = Artifact,
 )
 
 def qualified_name_with_subtarget(label: Label) -> str:
@@ -127,27 +125,24 @@ def define_output_paths(actions: AnalysisActions, prefix: [str, None], label: La
         jar = jar_parent.project("{}.jar".format(label.name)),
         classes = declare_prefixed_output(actions, prefix, "__classes__", dir = True),
         annotations = declare_prefixed_output(actions, prefix, "__gen__", dir = True),
-        scratch = declare_prefixed_output(actions, prefix, "scratch", dir = True),
     )
 
 # buildifier: disable=uninitialized
-def add_output_paths_to_cmd_args(cmd: cmd_args, output_paths: OutputPaths, path_to_class_hashes: [Artifact, None]) -> cmd_args:
+def output_paths_to_hidden_cmd_args(output_paths: OutputPaths, path_to_class_hashes: Artifact | None) -> cmd_args:
+    hidden = []
     if path_to_class_hashes != None:
-        cmd.hidden(path_to_class_hashes.as_output())
-    cmd.hidden(output_paths.jar_parent.as_output())
-    cmd.hidden(output_paths.jar.as_output())
-    cmd.hidden(output_paths.classes.as_output())
-    cmd.hidden(output_paths.annotations.as_output())
-    cmd.hidden(output_paths.scratch.as_output())
-    return cmd
+        hidden.append(path_to_class_hashes.as_output())
+    hidden.append(output_paths.jar_parent.as_output())
+    hidden.append(output_paths.jar.as_output())
+    hidden.append(output_paths.classes.as_output())
+    hidden.append(output_paths.annotations.as_output())
+    return cmd_args(hidden = hidden)
 
 def encode_output_paths(label: Label, paths: OutputPaths, target_type: TargetType) -> struct:
     paths = struct(
         classesDir = paths.classes.as_output(),
         outputJarDirPath = paths.jar_parent.as_output(),
         annotationPath = paths.annotations.as_output(),
-        pathToSourcesList = cmd_args([paths.scratch.as_output(), "/", "__srcs__"], delimiter = ""),
-        workingDirectory = paths.scratch.as_output(),
         outputJarPath = paths.jar.as_output(),
     )
 
@@ -158,7 +153,7 @@ def encode_output_paths(label: Label, paths: OutputPaths, target_type: TargetTyp
         libraryTargetFullyQualifiedName = base_qualified_name(label),
     )
 
-def encode_jar_params(remove_classes: list[str], output_paths: OutputPaths, manifest_file: [Artifact, None]) -> struct:
+def encode_jar_params(remove_classes: list[str], output_paths: OutputPaths, manifest_file: Artifact | None) -> struct:
     return struct(
         jarPath = output_paths.jar.as_output(),
         removeEntryPredicate = struct(
@@ -200,6 +195,7 @@ def get_compiling_deps_tset(
                 abi = entry,
                 abi_as_dir = None,
                 required_for_source_only_abi = True,
+                abi_jar_snapshot = None,
             )))
         compiling_deps_tset = actions.tset(JavaCompilingDepsTSet, children = children)
 
@@ -289,19 +285,23 @@ def encode_base_jar_command(
         resources_map: dict[str, Artifact],
         annotation_processor_properties: AnnotationProcessorProperties,
         plugin_params: [PluginParams, None],
-        manifest_file: [Artifact, None],
+        manifest_file: Artifact | None,
         extra_arguments: cmd_args,
         source_only_abi_compiling_deps: list[JavaClasspathEntry],
-        track_class_usage: bool) -> struct:
+        track_class_usage: bool,
+        is_incremental: bool = False) -> struct:
     library_jar_params = encode_jar_params(remove_classes, output_paths, manifest_file)
     qualified_name = get_qualified_name(label, target_type)
     if target_type == TargetType("source_only_abi"):
         compiling_classpath = classpath_jars_tag.tag_artifacts([dep.abi for dep in source_only_abi_compiling_deps])
+        compiling_classpath_snapshot = {}
     else:
         expect(len(source_only_abi_compiling_deps) == 0)
+        compiling_deps_list = filter(None, list(compiling_deps_tset.traverse())) if compiling_deps_tset else []
         compiling_classpath = classpath_jars_tag.tag_artifacts(
-            compiling_deps_tset.project_as_json("javacd_json") if compiling_deps_tset else None,
+            [dep.abi for dep in compiling_deps_list],
         )
+        compiling_classpath_snapshot = {dep.abi: dep.abi_jar_snapshot for dep in compiling_deps_list if dep.abi_jar_snapshot} if is_incremental else {}
 
     build_target_value = struct(
         fullyQualifiedName = qualified_name,
@@ -332,6 +332,7 @@ def encode_base_jar_command(
     return struct(
         outputPathsValue = encode_output_paths(label, output_paths, target_type),
         compileTimeClasspathPaths = compiling_classpath,
+        compileTimeClasspathSnapshotPaths = compiling_classpath_snapshot,
         javaSrcs = srcs,
         # TODO(cjhopman): populate jar infos. I think these are only used for unused dependencies (and appear to be broken in buck1 w/javacd anyway).
         fullJarInfos = [],
@@ -366,9 +367,10 @@ def setup_dep_files(
         hidden = ["artifact"]) -> cmd_args:
     dep_file = declare_prefixed_output(actions, actions_identifier, "dep_file.txt")
 
-    new_cmd = cmd_args()
-    new_cmd.add(cmd)
-    new_cmd.add([
+    new_cmd_args = []
+    new_cmd_hidden = []
+    new_cmd_args.append(cmd)
+    new_cmd_args.append([
         "--used-classes",
     ] + [
         used_classes_json.as_output()
@@ -381,16 +383,16 @@ def setup_dep_files(
     if abi_to_abi_dir_map:
         abi_to_abi_dir_map_file = declare_prefixed_output(actions, actions_identifier, "abi_to_abi_dir_map")
         actions.write(abi_to_abi_dir_map_file, abi_to_abi_dir_map)
-        new_cmd.add([
+        new_cmd_args.extend([
             "--jar-to-jar-dir-map",
             abi_to_abi_dir_map_file,
         ])
-        if type(abi_to_abi_dir_map) == "transitive_set_args_projection":
-            new_cmd.hidden(classpath_jars_tag.tag_artifacts(abi_to_abi_dir_map))
+        if isinstance(abi_to_abi_dir_map, TransitiveSetArgsProjection):
+            new_cmd_hidden.append(classpath_jars_tag.tag_artifacts(abi_to_abi_dir_map))
     for hidden_artifact in hidden:
-        new_cmd.hidden(classpath_jars_tag.tag_artifacts(hidden_artifact))
+        new_cmd_hidden.append(classpath_jars_tag.tag_artifacts(hidden_artifact))
 
-    return new_cmd
+    return cmd_args(new_cmd_args, hidden = new_cmd_hidden)
 
 FORCE_PERSISTENT_WORKERS = read_root_config("build", "require_persistent_workers", "false").lower() == "true"
 
@@ -401,21 +403,35 @@ def prepare_cd_exe(
         qualified_name: str,
         compiler: Artifact,
         main_class: str,
         worker: WorkerInfo,
-        debug_port: [int, None],
-        debug_target: [Label, None],
+        target_specified_debug_port: [int, None],
+        toolchain_specified_debug_port: [int, None],
+        toolchain_specified_debug_target: [Label, None],
         extra_jvm_args: list[str],
         extra_jvm_args_target: list[Label]) -> tuple:
     local_only = False
     jvm_args = ["-XX:-MaxFDLimit"]
 
+    # The variables 'extra_jvm_args' and 'extra_jvm_args_target' are general-purpose,
+    # but they are primarily designed for profiling use-cases.
+    # The following section is configured with the profiling use-case in mind.
     if extra_jvm_args_target:
-        local_only = True
-        for target in extra_jvm_args_target:
-            if qualified_name == qualified_name_with_subtarget(target):
+        if len(extra_jvm_args_target) == 1:
+            # If there's only one target to profile, we want to isolate its compilation.
+            # This target should be built in its own action, allowing the worker (if available) to handle the remaining targets.
+            if qualified_name == qualified_name_with_subtarget(extra_jvm_args_target[0]):
                 jvm_args = jvm_args + extra_jvm_args
-                local_only = False
-                break
+                local_only = True  # This flag ensures the target is not run on the worker.
+        else:
+            # If there are multiple targets to profile, they should be built on the worker to generate a single profiling data set.
+            # The remaining targets should be built individually, either locally or on Remote Execution (RE).
+            local_only = True  # By default, targets are not run on the worker.
+            for target in extra_jvm_args_target:
+                # If the current target matches the qualified name with subtarget, it is selected for profiling.
+                if qualified_name == qualified_name_with_subtarget(target):
+                    jvm_args = jvm_args + extra_jvm_args
+                    local_only = False  # This flag allows the target to run on the worker.
+                    break
     else:
+        # If no specific target is provided, the extra JVM arguments are added to every target,
+        # whether it runs on the worker, locally, or on RE.
         jvm_args = jvm_args + extra_jvm_args
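Note (illustrative sketch, not part of this patch): the `local_only` dispatch above condenses to the following runnable Python; the function and parameter names are hypothetical:

    def is_local_only(name, profiled):
        # No profiling requested: every target may use the worker.
        if not profiled:
            return False
        # One profiled target: isolate just that target off the worker.
        if len(profiled) == 1:
            return name == profiled[0]
        # Several profiled targets: keep them together on the worker so they
        # produce a single profile; push everything else off the worker.
        return name not in profiled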
     # Allow JVM compiler daemon to access internal jdk.compiler APIs
@@ -427,6 +443,7 @@ def prepare_cd_exe(
         "--add-exports=jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED",
         "--add-exports=jdk.compiler/com.sun.tools.javac.model=ALL-UNNAMED",
         "--add-exports=jdk.compiler/com.sun.tools.javac.processing=ALL-UNNAMED",
+        "--add-exports=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED",
         "--add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED",
         "--add-opens=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED",
         "--add-opens=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED",
@@ -435,7 +452,14 @@ def prepare_cd_exe(
         "--add-opens=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED",
     ]
 
-    if debug_port and qualified_name == qualified_name_with_subtarget(debug_target):
+    if target_specified_debug_port:
+        debug_port = target_specified_debug_port
+    elif toolchain_specified_debug_port and qualified_name == qualified_name_with_subtarget(toolchain_specified_debug_target):
+        debug_port = toolchain_specified_debug_port
+    else:
+        debug_port = None
+
+    if debug_port:
         # Do not use a worker when debugging is enabled
         local_only = True
         jvm_args.extend(["-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address={}".format(debug_port)])
@@ -454,27 +478,37 @@ def prepare_cd_exe(
     )
     return worker_run_info, FORCE_PERSISTENT_WORKERS
 
+FinalJarOutput = record(
+    final_jar = Artifact,
+    # The same as final_jar unless there is a jar_postprocessor.
+    preprocessed_jar = Artifact,
+)
+
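Note (illustrative, not part of this patch): callers of `prepare_final_jar` below previously received a single Artifact and now pick a field off this record. Sketch:

    out = prepare_final_jar(...)   # FinalJarOutput
    full_library = out.final_jar   # the jar to package, post-processing included
    pre = out.preprocessed_jar     # the same jar before any jar_postprocessor ran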
 # If there's additional compiled srcs, we need to merge them in and if the
 # caller specified an output artifact we need to make sure the jar is in that
 # location.
 def prepare_final_jar(
         actions: AnalysisActions,
         actions_identifier: [str, None],
-        output: [Artifact, None],
+        output: Artifact | None,
         output_paths: OutputPaths,
-        additional_compiled_srcs: [Artifact, None],
+        additional_compiled_srcs: Artifact | None,
         jar_builder: RunInfo,
-        jar_postprocessor: [RunInfo, None]) -> Artifact:
+        jar_postprocessor: [RunInfo, None]) -> FinalJarOutput:
+    def make_output(jar: Artifact) -> FinalJarOutput:
+        if jar_postprocessor:
+            postprocessed_jar = postprocess_jar(actions, jar_postprocessor, jar, actions_identifier)
+            return FinalJarOutput(final_jar = postprocessed_jar, preprocessed_jar = jar)
+        else:
+            return FinalJarOutput(final_jar = jar, preprocessed_jar = jar)
+
     if not additional_compiled_srcs:
         output_jar = output_paths.jar
         if output:
             actions.copy_file(output.as_output(), output_paths.jar)
             output_jar = output
 
-        if jar_postprocessor:
-            return postprocess_jar(actions, jar_postprocessor, output_jar, actions_identifier)
-        else:
-            return output_jar
+        return make_output(output_jar)
 
     merged_jar = output
     if not merged_jar:
@@ -488,29 +522,26 @@ def prepare_final_jar(
             merged_jar.as_output(),
             "--entries-to-jar",
             files_to_merge_file,
-        ]).hidden(files_to_merge),
+        ], hidden = files_to_merge),
         category = "merge_additional_srcs",
         identifier = actions_identifier,
     )
 
-    if jar_postprocessor:
-        return postprocess_jar(actions, jar_postprocessor, merged_jar, actions_identifier)
-    else:
-        return merged_jar
+    return make_output(merged_jar)
 
 def generate_abi_jars(
         actions: AnalysisActions,
         actions_identifier: [str, None],
         label: Label,
         abi_generation_mode: [AbiGenerationMode, None],
-        additional_compiled_srcs: [Artifact, None],
+        additional_compiled_srcs: Artifact | None,
         is_building_android_binary: bool,
         class_abi_generator: Dependency,
         final_jar: Artifact,
         compiling_deps_tset: [JavaCompilingDepsTSet, None],
         source_only_abi_deps: list[Dependency],
-        class_abi_jar: [Artifact, None],
-        class_abi_output_dir: [Artifact, None],
+        class_abi_jar: Artifact | None,
+        class_abi_output_dir: Artifact | None,
         encode_abi_command: typing.Callable,
         define_action: typing.Callable) -> tuple:
     class_abi = None
diff --git a/prelude/kotlin/kotlin.bzl b/prelude/kotlin/kotlin.bzl
index 6f60384e95e..6bff10eac26 100644
--- a/prelude/kotlin/kotlin.bzl
+++ b/prelude/kotlin/kotlin.bzl
@@ -5,6 +5,7 @@
 # License, Version 2.0 found in the LICENSE-APACHE file in the root directory
 # of this source tree.
+load("@prelude//:validation_deps.bzl", "VALIDATION_DEPS_ATTR_NAME") load("@prelude//android:build_only_native_code.bzl", "is_build_only_native_code") load("@prelude//android:configuration.bzl", "is_building_android_binary_attr") load("@prelude//java:java.bzl", "AbiGenerationMode", "dex_min_sdk_version") @@ -21,8 +22,8 @@ implemented_rules = { extra_attributes = { "kotlin_library": { "abi_generation_mode": attrs.option(attrs.enum(AbiGenerationMode), default = None), - "javac": attrs.option(attrs.one_of(attrs.dep(), attrs.source()), default = None), "resources_root": attrs.option(attrs.string(), default = None), + VALIDATION_DEPS_ATTR_NAME: attrs.set(attrs.dep(), sorted = True, default = []), "_build_only_native_code": attrs.default_only(attrs.bool(default = is_build_only_native_code())), "_dex_min_sdk_version": attrs.option(attrs.int(), default = dex_min_sdk_version()), "_dex_toolchain": toolchains_common.dex(), @@ -33,7 +34,7 @@ extra_attributes = { }, "kotlin_test": { "abi_generation_mode": attrs.option(attrs.enum(AbiGenerationMode), default = None), - "javac": attrs.option(attrs.one_of(attrs.dep(), attrs.source()), default = None), + "java_agents": attrs.list(attrs.source(), default = []), "resources_root": attrs.option(attrs.string(), default = None), "test_class_names_file": attrs.option(attrs.source(), default = None), "unbundled_resources_root": attrs.option(attrs.source(allow_directory = True), default = None), diff --git a/prelude/kotlin/kotlin_library.bzl b/prelude/kotlin/kotlin_library.bzl index b58fd9fbc95..edb9e716159 100644 --- a/prelude/kotlin/kotlin_library.bzl +++ b/prelude/kotlin/kotlin_library.bzl @@ -5,6 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +load("@prelude//:validation_deps.bzl", "get_validation_deps_outputs") load("@prelude//android:android_providers.bzl", "merge_android_packageable_info") load( "@prelude//java:java_library.bzl", @@ -38,6 +39,8 @@ load( ) load("@prelude//kotlin:kotlin_utils.bzl", "get_kotlinc_compatible_target") load("@prelude//kotlin:kotlincd_jar_creator.bzl", "create_jar_artifact_kotlincd") +load("@prelude//utils:argfile.bzl", "at_argfile") +load("@prelude//utils:expect.bzl", "expect") load("@prelude//utils:lazy.bzl", "lazy") load("@prelude//utils:utils.bzl", "map_idx") @@ -49,7 +52,7 @@ def _create_kotlin_sources( deps: list[Dependency], annotation_processor_properties: AnnotationProcessorProperties, ksp_annotation_processor_properties: AnnotationProcessorProperties, - additional_classpath_entries: list[Artifact]) -> (Artifact, [Artifact, None], [Artifact, None]): + additional_classpath_entries: list[Artifact]) -> (Artifact, Artifact | None, Artifact | None): """ Runs kotlinc on the provided kotlin sources. 
""" @@ -59,19 +62,21 @@ def _create_kotlin_sources( kotlinc = kotlin_toolchain.kotlinc[RunInfo] kotlinc_output = ctx.actions.declare_output("kotlinc_classes_output", dir = True) - compile_kotlin_cmd = cmd_args([ + compile_kotlin_cmd_args = [ compile_kotlin_tool, "--kotlinc_output", kotlinc_output.as_output(), - ]) + ] + compile_kotlin_cmd_hidden = [] + java_toolchain = ctx.attrs._java_toolchain[JavaToolchainInfo] zip_scrubber_args = ["--zip_scrubber", cmd_args(java_toolchain.zip_scrubber, delimiter = " ")] - compile_kotlin_cmd.add(zip_scrubber_args) + compile_kotlin_cmd_args.append(zip_scrubber_args) kotlinc_cmd_args = cmd_args([kotlinc]) compiling_classpath = [] + additional_classpath_entries - compiling_deps_tset = derive_compiling_deps(ctx.actions, None, deps + kotlin_toolchain.kotlinc_classpath) + compiling_deps_tset = derive_compiling_deps(ctx.actions, None, deps + [kotlin_toolchain.kotlin_stdlib]) if compiling_deps_tset: compiling_classpath.extend( [compiling_dep.abi for compiling_dep in list(compiling_deps_tset.traverse())], @@ -82,17 +87,15 @@ def _create_kotlin_sources( delimiter = get_path_separator_for_exec_os(ctx), ) - # write joined classpath string into args file - classpath_args_file, _ = ctx.actions.write( - "kotlinc_classpath", - classpath_args, - allow_args = True, - ) - - compile_kotlin_cmd.hidden([compiling_classpath]) + compile_kotlin_cmd_hidden.append([compiling_classpath]) kotlinc_cmd_args.add(["-classpath"]) - kotlinc_cmd_args.add(cmd_args(classpath_args_file, format = "@{}")) + kotlinc_cmd_args.add(at_argfile( + actions = ctx.actions, + name = "kotlinc_classpath", + args = classpath_args, + allow_args = True, + )) module_name = ctx.label.package.replace("/", ".") + "." + ctx.label.name kotlinc_cmd_args.add( @@ -113,9 +116,9 @@ def _create_kotlin_sources( kapt_generated_sources_output = None if annotation_processor_properties.annotation_processors: - compile_kotlin_cmd.add(["--kapt_annotation_processing_jar", kotlin_toolchain.annotation_processing_jar[JavaLibraryInfo].library_output.full_library]) - compile_kotlin_cmd.add(["--kapt_annotation_processors", ",".join([p for ap in annotation_processor_properties.annotation_processors for p in ap.processors])]) - compile_kotlin_cmd.add(["--kapt_annotation_processor_params", ";".join(annotation_processor_properties.annotation_processor_params)]) + compile_kotlin_cmd_args.extend(["--kapt_annotation_processing_jar", kotlin_toolchain.annotation_processing_jar[JavaLibraryInfo].library_output.full_library]) + compile_kotlin_cmd_args.extend(["--kapt_annotation_processors", ",".join([p for ap in annotation_processor_properties.annotation_processors for p in ap.processors])]) + compile_kotlin_cmd_args.extend(["--kapt_annotation_processor_params", ";".join(annotation_processor_properties.annotation_processor_params)]) annotation_processor_classpath_tsets = ( filter(None, ([ap.deps for ap in annotation_processor_properties.annotation_processors])) + @@ -126,23 +129,23 @@ def _create_kotlin_sources( children = annotation_processor_classpath_tsets, ).project_as_args("full_jar_args") kapt_classpath_file = ctx.actions.write("kapt_classpath_file", annotation_processor_classpath) - compile_kotlin_cmd.add(["--kapt_classpath_file", kapt_classpath_file]) - compile_kotlin_cmd.hidden(annotation_processor_classpath) + compile_kotlin_cmd_args.extend(["--kapt_classpath_file", kapt_classpath_file]) + compile_kotlin_cmd_hidden.append(annotation_processor_classpath) sources_output = ctx.actions.declare_output("kapt_sources_output") - 
compile_kotlin_cmd.add(["--kapt_sources_output", sources_output.as_output()]) + compile_kotlin_cmd_args.append(["--kapt_sources_output", sources_output.as_output()]) classes_output = ctx.actions.declare_output("kapt_classes_output") - compile_kotlin_cmd.add(["--kapt_classes_output", classes_output.as_output()]) + compile_kotlin_cmd_args.append(["--kapt_classes_output", classes_output.as_output()]) stubs = ctx.actions.declare_output("kapt_stubs") - compile_kotlin_cmd.add(["--kapt_stubs", stubs.as_output()]) + compile_kotlin_cmd_args.append(["--kapt_stubs", stubs.as_output()]) kapt_generated_sources_output = ctx.actions.declare_output("kapt_generated_sources_output.src.zip") - compile_kotlin_cmd.add(["--kapt_generated_sources_output", kapt_generated_sources_output.as_output()]) - compile_kotlin_cmd.add(["--kapt_base64_encoder", cmd_args(kotlin_toolchain.kapt_base64_encoder[RunInfo], delimiter = " ")]) + compile_kotlin_cmd_args.append(["--kapt_generated_sources_output", kapt_generated_sources_output.as_output()]) + compile_kotlin_cmd_args.append(["--kapt_base64_encoder", cmd_args(kotlin_toolchain.kapt_base64_encoder[RunInfo], delimiter = " ")]) generated_kotlin_output = ctx.actions.declare_output("kapt_generated_kotlin_output") - compile_kotlin_cmd.add(["--kapt_generated_kotlin_output", generated_kotlin_output.as_output()]) + compile_kotlin_cmd_args.append(["--kapt_generated_kotlin_output", generated_kotlin_output.as_output()]) if jvm_target: - compile_kotlin_cmd.add(["--kapt_jvm_target", jvm_target]) + compile_kotlin_cmd_args.append(["--kapt_jvm_target", jvm_target]) friend_paths = ctx.attrs.friend_paths if friend_paths: @@ -155,8 +158,8 @@ def _create_kotlin_sources( ksp_zipped_sources_output = None if ksp_annotation_processor_properties.annotation_processors: - ksp_cmd = cmd_args(compile_kotlin_tool) - ksp_cmd.add(zip_scrubber_args) + ksp_cmd = [compile_kotlin_tool] + ksp_cmd.append(zip_scrubber_args) ksp_annotation_processor_classpath_tsets = filter(None, ([ap.deps for ap in ksp_annotation_processor_properties.annotation_processors])) if ksp_annotation_processor_classpath_tsets: @@ -164,22 +167,24 @@ def _create_kotlin_sources( JavaPackagingDepTSet, children = ksp_annotation_processor_classpath_tsets, ).project_as_args("full_jar_args") - ksp_cmd.add(["--ksp_processor_jars"]) - ksp_cmd.add(cmd_args(ksp_annotation_processor_classpath, delimiter = ",")) + ksp_cmd.append("--ksp_processor_jars") + ksp_cmd.append(cmd_args(ksp_annotation_processor_classpath, delimiter = ",")) - ksp_cmd.add(["--ksp_classpath", classpath_args]) + ksp_cmd.extend(["--ksp_classpath", classpath_args]) ksp_classes_and_resources_output = ctx.actions.declare_output("ksp_output_dir/ksp_classes_and_resources_output") - ksp_cmd.add(["--ksp_classes_and_resources_output", ksp_classes_and_resources_output.as_output()]) - ksp_output = cmd_args(ksp_classes_and_resources_output.as_output()).parent() - ksp_cmd.add(["--ksp_output", ksp_output]) + ksp_cmd.extend(["--ksp_classes_and_resources_output", ksp_classes_and_resources_output.as_output()]) + ksp_output = cmd_args(ksp_classes_and_resources_output.as_output(), parent = 1) + ksp_cmd.extend(["--ksp_output", ksp_output]) ksp_sources_output = ctx.actions.declare_output("ksp_output_dir/ksp_sources_output") - ksp_cmd.add(["--ksp_sources_output", ksp_sources_output.as_output()]) + ksp_cmd.extend(["--ksp_sources_output", ksp_sources_output.as_output()]) ksp_zipped_sources_output = ctx.actions.declare_output("ksp_output_dir/ksp_zipped_sources_output.src.zip") - 
ksp_cmd.add(["--ksp_zipped_sources_output", ksp_zipped_sources_output.as_output()]) - ksp_cmd.add(["--ksp_project_base_dir", ctx.label.path]) + ksp_cmd.extend(["--ksp_zipped_sources_output", ksp_zipped_sources_output.as_output()]) + ksp_cmd.extend(["--ksp_project_base_dir", ctx.label.path]) ksp_kotlinc_cmd_args = cmd_args(kotlinc_cmd_args) - _add_plugins(ctx, ksp_kotlinc_cmd_args, ksp_cmd, is_ksp = True) + plugins_cmd_args = _add_plugins(ctx, is_ksp = True) + ksp_kotlinc_cmd_args.add(plugins_cmd_args.kotlinc_cmd_args) + ksp_cmd.append(plugins_cmd_args.compile_kotlin_cmd) ksp_cmd_args_file, _ = ctx.actions.write( "ksp_kotlinc_cmd", @@ -187,21 +192,24 @@ def _create_kotlin_sources( allow_args = True, ) - ksp_cmd.add("--kotlinc_cmd_file") - ksp_cmd.add(ksp_cmd_args_file) - ksp_cmd.hidden(ksp_kotlinc_cmd_args) + ksp_cmd.extend(["--kotlinc_cmd_file", ksp_cmd_args_file]) - ctx.actions.run(ksp_cmd, category = "ksp_kotlinc") + ctx.actions.run( + cmd_args(ksp_cmd, hidden = ksp_kotlinc_cmd_args), + category = "ksp_kotlinc", + ) zipped_sources = (zipped_sources or []) + [ksp_zipped_sources_output] - compile_kotlin_cmd.add(["--ksp_generated_classes_and_resources", ksp_classes_and_resources_output]) + compile_kotlin_cmd_args.extend(["--ksp_generated_classes_and_resources", ksp_classes_and_resources_output]) - _add_plugins(ctx, kotlinc_cmd_args, compile_kotlin_cmd, is_ksp = False) + plugin_cmd_args = _add_plugins(ctx, is_ksp = False) + kotlinc_cmd_args.add(plugin_cmd_args.kotlinc_cmd_args) + compile_kotlin_cmd_args.append(plugin_cmd_args.compile_kotlin_cmd) if zipped_sources: zipped_sources_file = ctx.actions.write("kotlinc_zipped_source_args", zipped_sources) - compile_kotlin_cmd.add(["--zipped_sources_file", zipped_sources_file]) - compile_kotlin_cmd.hidden(zipped_sources) + compile_kotlin_cmd_args.append(["--zipped_sources_file", zipped_sources_file]) + compile_kotlin_cmd_hidden.append(zipped_sources) args_file, _ = ctx.actions.write( "kotlinc_cmd", @@ -209,24 +217,32 @@ def _create_kotlin_sources( allow_args = True, ) - compile_kotlin_cmd.hidden([plain_sources]) + compile_kotlin_cmd_hidden.append(plain_sources) - compile_kotlin_cmd.add("--kotlinc_cmd_file") - compile_kotlin_cmd.add(args_file) - compile_kotlin_cmd.hidden(kotlinc_cmd_args) + compile_kotlin_cmd_args.append("--kotlinc_cmd_file") + compile_kotlin_cmd_args.append(args_file) + compile_kotlin_cmd_hidden.append(kotlinc_cmd_args) - ctx.actions.run(compile_kotlin_cmd, category = "kotlinc") + ctx.actions.run( + cmd_args(compile_kotlin_cmd_args, hidden = compile_kotlin_cmd_hidden), + category = "kotlinc", + ) return kotlinc_output, kapt_generated_sources_output, ksp_zipped_sources_output def _is_ksp_plugin(plugin: str) -> bool: return "symbol-processing" in plugin +_PluginCmdArgs = record( + kotlinc_cmd_args = cmd_args, + compile_kotlin_cmd = cmd_args, +) + def _add_plugins( ctx: AnalysisContext, - kotlinc_cmd_args: cmd_args, - compile_kotlin_cmd: cmd_args, - is_ksp: bool): + is_ksp: bool) -> _PluginCmdArgs: + kotlinc_cmd_args = cmd_args() + compile_kotlin_cmd = cmd_args() for plugin, plugin_options in ctx.attrs.kotlin_compiler_plugins.items(): if _is_ksp_plugin(str(plugin)) != is_ksp: continue @@ -245,6 +261,8 @@ def _add_plugins( if options: kotlinc_cmd_args.add(["-P", cmd_args(options, delimiter = ",")]) + return _PluginCmdArgs(kotlinc_cmd_args = kotlinc_cmd_args, compile_kotlin_cmd = compile_kotlin_cmd) + def kotlin_library_impl(ctx: AnalysisContext) -> list[Provider]: packaging_deps = ctx.attrs.deps + ctx.attrs.exported_deps + 
ctx.attrs.runtime_deps @@ -264,14 +282,27 @@ def kotlin_library_impl(ctx: AnalysisContext) -> list[Provider]: android_packageable_info, ] - java_providers = build_kotlin_library(ctx) + java_providers = build_kotlin_library( + ctx = ctx, + validation_deps_outputs = get_validation_deps_outputs(ctx), + ) return to_list(java_providers) + [android_packageable_info] +def _check_exported_deps(exported_deps: list[Dependency], attr_name: str): + for exported_dep in exported_deps: + # TODO(navidq) add a check that the exported dep always have a JavaLibraryInfo provider + if JavaLibraryInfo in exported_dep: + expect( + not exported_dep[JavaLibraryInfo].may_not_be_exported, + "{} has 'may_not_be_exported' label and should not be present in {}.".format(exported_dep.label.raw_target(), attr_name), + ) + def build_kotlin_library( ctx: AnalysisContext, additional_classpath_entries: list[Artifact] = [], bootclasspath_entries: list[Artifact] = [], - extra_sub_targets: dict = {}) -> JavaProviders: + extra_sub_targets: dict = {}, + validation_deps_outputs: [list[Artifact], None] = None) -> JavaProviders: srcs = ctx.attrs.srcs has_kotlin_srcs = lazy.is_any(lambda src: src.extension == ".kt" or src.basename.endswith(".src.zip") or src.basename.endswith("-sources.jar"), srcs) @@ -284,11 +315,19 @@ def build_kotlin_library( # Match buck1, which always does class ABI generation for Kotlin targets unless explicitly specified. override_abi_generation_mode = get_abi_generation_mode(ctx.attrs.abi_generation_mode) or AbiGenerationMode("class"), extra_sub_targets = extra_sub_targets, + validation_deps_outputs = validation_deps_outputs, ) else: + compose_stability_config = getattr(ctx.attrs, "compose_stability_config", None) + if compose_stability_config != None: + ctx.attrs.extra_kotlinc_arguments.append("-P") + ctx.attrs.extra_kotlinc_arguments.append(cmd_args(["plugin:androidx.compose.compiler.plugins.kotlin:stabilityConfigurationPath", ctx.attrs._compose_stability_config], delimiter = "=")) + deps_query = getattr(ctx.attrs, "deps_query", []) or [] provided_deps_query = getattr(ctx.attrs, "provided_deps_query", []) or [] + _check_exported_deps(ctx.attrs.exported_deps, "exported_deps") + _check_exported_deps(ctx.attrs.exported_provided_deps, "exported_provided_deps") deps = ( ctx.attrs.deps + deps_query + @@ -304,7 +343,7 @@ def build_kotlin_library( ctx.attrs.annotation_processor_params, ctx.attrs.annotation_processor_deps, ) - ksp_annotation_processor_properties = create_ksp_annotation_processor_properties(ctx, ctx.attrs.plugins) + ksp_annotation_processor_properties = create_ksp_annotation_processor_properties(ctx.attrs.plugins) kotlin_toolchain = ctx.attrs._kotlin_toolchain[KotlinToolchainInfo] if kotlin_toolchain.kotlinc_protocol == "classic": @@ -331,11 +370,18 @@ def build_kotlin_library( additional_compiled_srcs = kotlinc_classes, generated_sources = filter(None, [kapt_generated_sources, ksp_generated_sources]), extra_sub_targets = extra_sub_targets, + validation_deps_outputs = validation_deps_outputs, ) return java_lib elif kotlin_toolchain.kotlinc_protocol == "kotlincd": source_level, target_level = get_java_version_attributes(ctx) - extra_arguments = cmd_args(ctx.attrs.extra_arguments) + extra_arguments = cmd_args( + ctx.attrs.extra_arguments, + # The outputs of validation_deps need to be added as hidden arguments + # to an action for the validation_deps targets to be built and enforced. 
+                hidden = validation_deps_outputs or [],
+            )
+
             common_kotlincd_kwargs = {
                 "abi_generation_mode": get_abi_generation_mode(ctx.attrs.abi_generation_mode),
                 "actions": ctx.actions,
@@ -345,9 +391,11 @@ def build_kotlin_library(
                     annotation_processor_params = annotation_processor_properties.annotation_processor_params + ksp_annotation_processor_properties.annotation_processor_params,
                 ),
                 "bootclasspath_entries": bootclasspath_entries,
-                "deps": deps,
+                "debug_port": getattr(ctx.attrs, "debug_port", None),
+                "deps": deps + [kotlin_toolchain.kotlin_stdlib],
                 "extra_kotlinc_arguments": ctx.attrs.extra_kotlinc_arguments,
                 "friend_paths": ctx.attrs.friend_paths,
+                "incremental": ctx.attrs.incremental,
                 "is_building_android_binary": ctx.attrs._is_building_android_binary,
                 "jar_postprocessor": ctx.attrs.jar_postprocessor[RunInfo] if hasattr(ctx.attrs, "jar_postprocessor") and ctx.attrs.jar_postprocessor else None,
                 "java_toolchain": ctx.attrs._java_toolchain[JavaToolchainInfo],
@@ -373,6 +421,11 @@ def build_kotlin_library(
                 **common_kotlincd_kwargs
             )
 
+            if outputs and outputs.incremental_state_dir:
+                extra_sub_targets = extra_sub_targets | {"incremental_state_dir": [
+                    DefaultInfo(default_output = outputs.incremental_state_dir),
+                ]}
+
             if outputs and outputs.annotation_processor_output:
                 generated_sources = [outputs.annotation_processor_output]
                 extra_sub_targets = extra_sub_targets | {"generated_sources": [
@@ -402,9 +455,18 @@ def build_kotlin_library(
                     DefaultInfo(default_output = nullsafe_info.output),
                 ]}
 
-            java_library_info, java_packaging_info, shared_library_info, cxx_resource_info, linkable_graph, template_placeholder_info, intellij_info = create_java_library_providers(
+            class_to_src_map, sources_jar, class_to_src_map_sub_targets = get_class_to_source_map_info(
+                ctx,
+                outputs = outputs,
+                deps = ctx.attrs.deps + deps_query + ctx.attrs.exported_deps,
+                generate_sources_jar = True,
+            )
+            extra_sub_targets = extra_sub_targets | class_to_src_map_sub_targets
+
+            java_library_info, java_packaging_info, global_code_info, shared_library_info, cxx_resource_info, linkable_graph, template_placeholder_info, intellij_info = create_java_library_providers(
                 ctx,
                 library_output = outputs.classpath_entry if outputs else None,
+                global_code_config = java_toolchain.global_code_config,
                 declared_deps = ctx.attrs.deps + deps_query,
                 exported_deps = ctx.attrs.exported_deps,
                 provided_deps = ctx.attrs.provided_deps + provided_deps_query,
@@ -413,15 +475,10 @@ def build_kotlin_library(
                 needs_desugar = source_level > 7 or target_level > 7,
                 generated_sources = generated_sources,
                 has_srcs = bool(srcs),
+                sources_jar = sources_jar,
+                preprocessed_library = outputs.preprocessed_library if outputs else None,
             )
 
-            class_to_src_map, class_to_src_map_sub_targets = get_class_to_source_map_info(
-                ctx,
-                outputs = outputs,
-                deps = ctx.attrs.deps + deps_query + ctx.attrs.exported_deps,
-            )
-            extra_sub_targets = extra_sub_targets | class_to_src_map_sub_targets
-
             default_info = get_default_info(
                 ctx.actions,
                 ctx.attrs._java_toolchain[JavaToolchainInfo],
@@ -433,6 +490,7 @@ def build_kotlin_library(
                 java_library_info = java_library_info,
                 java_library_intellij_info = intellij_info,
                 java_packaging_info = java_packaging_info,
+                java_global_code_info = global_code_info,
                 shared_library_info = shared_library_info,
                 cxx_resource_info = cxx_resource_info,
                 linkable_graph = linkable_graph,
diff --git a/prelude/kotlin/kotlin_toolchain.bzl b/prelude/kotlin/kotlin_toolchain.bzl
index 0c863de11aa..ef9e094e64a 100644
--- a/prelude/kotlin/kotlin_toolchain.bzl
+++ b/prelude/kotlin/kotlin_toolchain.bzl
@@ -11,31 +11,30 @@ KotlinToolchainInfo = provider(
     # @unsorted-dict-items
     doc = "Kotlin toolchain info",
     fields = {
+        "allow_k2_usage": provider_field(typing.Any, default = None),
         "annotation_processing_jar": provider_field(typing.Any, default = None),
         "class_loader_bootstrapper": provider_field(typing.Any, default = None),
-        "compilation_tracer_plugin": provider_field(typing.Any, default = None),
         "compile_kotlin": provider_field(typing.Any, default = None),
         "dep_files": provider_field(typing.Any, default = None),
+        "enable_incremental_compilation": provider_field(typing.Any, default = None),
         "jvm_abi_gen_plugin": provider_field(typing.Any, default = None),
         "kapt_base64_encoder": provider_field(typing.Any, default = None),
         "kosabi_applicability_plugin": provider_field(typing.Any, default = None),
         "kosabi_jvm_abi_gen_plugin": provider_field(typing.Any, default = None),
+        "kosabi_standalone": provider_field(typing.Any, default = None),
         "kosabi_stubs_gen_plugin": provider_field(typing.Any, default = None),
-        "kosabi_supported_ksp_providers": provider_field(typing.Any, default = None),
         "kotlin_error_handler": provider_field(typing.Any, default = None),
         "kotlin_home_libraries": provider_field(typing.Any, default = None),
         "kotlin_stdlib": provider_field(typing.Any, default = None),
         "kotlinc": provider_field(typing.Any, default = None),
-        "kotlinc_classpath": provider_field(typing.Any, default = None),
         "kotlinc_protocol": provider_field(typing.Any, default = None),
+        "kotlinc_run_via_build_tools_api": provider_field(typing.Any, default = None),
         "kotlincd_debug_port": provider_field(typing.Any, default = None),
         "kotlincd_debug_target": provider_field(typing.Any, default = None),
         "kotlincd_jvm_args": provider_field(typing.Any, default = None),
         "kotlincd_jvm_args_target": provider_field(typing.Any, default = None),
         "kotlincd_main_class": provider_field(typing.Any, default = None),
         "kotlincd_worker": provider_field(typing.Any, default = None),
-        "qpld_dotslash": provider_field(typing.Any, default = None),
-        "should_use_compilation_tracer": provider_field(typing.Any, default = None),
         "track_class_usage_plugin": provider_field(typing.Any, default = None),
     },
 )
diff --git a/prelude/kotlin/kotlincd_jar_creator.bzl b/prelude/kotlin/kotlincd_jar_creator.bzl
index 0eaf5681c7e..a524df4a8ea 100644
--- a/prelude/kotlin/kotlincd_jar_creator.bzl
+++ b/prelude/kotlin/kotlincd_jar_creator.bzl
@@ -10,6 +10,7 @@ load(
     "JavaClasspathEntry",  # @unused Used as a type
     "JavaCompileOutputs",  # @unused Used as a type
     "JavaLibraryInfo",
+    "generate_java_classpath_snapshot",
     "make_compile_outputs",
 )
 load("@prelude//java:java_resources.bzl", "get_resources_map")
@@ -26,7 +27,6 @@ load(
     "@prelude//jvm:cd_jar_creator_util.bzl",
     "OutputPaths",
     "TargetType",
-    "add_output_paths_to_cmd_args",
     "base_qualified_name",
     "declare_prefixed_output",
     "define_output_paths",
@@ -34,6 +34,7 @@ load(
     "encode_jar_params",
     "generate_abi_jars",
     "get_compiling_deps_tset",
+    "output_paths_to_hidden_cmd_args",
     "prepare_cd_exe",
     "prepare_final_jar",
     "setup_dep_files",
@@ -43,11 +44,6 @@ load("@prelude//kotlin:kotlin_utils.bzl", "get_kotlinc_compatible_target")
 load("@prelude//utils:expect.bzl", "expect")
 load("@prelude//utils:utils.bzl", "map_idx")
 
-buckPaths = struct(
-    configuredBuckOut = "buck-out/v2",
-    includeTargetConfigHash = True,
-)
-
 def create_jar_artifact_kotlincd(
         actions: AnalysisActions,
         actions_identifier: [str, None],
@@ -62,7 +58,7 @@ def create_jar_artifact_kotlincd(
         resources_root: [str, None],
         annotation_processor_properties: AnnotationProcessorProperties,
         plugin_params: [PluginParams, None],
-        manifest_file: [Artifact, None],
+        manifest_file: Artifact | None,
         source_level: int,
         target_level: int,
         deps: list[Dependency],
@@ -74,11 +70,13 @@ def create_jar_artifact_kotlincd(
         is_building_android_binary: bool,
         friend_paths: list[Dependency],
         kotlin_compiler_plugins: dict,
-        extra_kotlinc_arguments: list[str],
+        extra_kotlinc_arguments: list,
         k2: bool,
+        incremental: bool,
         is_creating_subtarget: bool = False,
         optional_dirs: list[OutputArtifact] = [],
-        jar_postprocessor: [RunInfo, None] = None) -> JavaCompileOutputs:
+        jar_postprocessor: [RunInfo, None] = None,
+        debug_port: [int, None] = None) -> JavaCompileOutputs:
     resources_map = get_resources_map(
         java_toolchain = java_toolchain,
         package = label.package,
@@ -107,7 +105,10 @@ def create_jar_artifact_kotlincd(
         jvm_abi_gen = None
         should_use_jvm_abi_gen = False
 
-    def encode_kotlin_extra_params(kotlin_compiler_plugins):
+    should_kotlinc_run_incrementally = kotlin_toolchain.enable_incremental_compilation and incremental
+    incremental_state_dir = declare_prefixed_output(actions, actions_identifier, "incremental_state", dir = True) if should_kotlinc_run_incrementally else None
+
+    def encode_kotlin_extra_params(kotlin_compiler_plugins, incremental_state_dir = None):
         kosabiPluginOptionsMap = {}
         if kotlin_toolchain.kosabi_stubs_gen_plugin != None:
             kosabiPluginOptionsMap["kosabi_stubs_gen_plugin"] = kotlin_toolchain.kosabi_stubs_gen_plugin
@@ -118,12 +119,23 @@ def create_jar_artifact_kotlincd(
         if kotlin_toolchain.kosabi_jvm_abi_gen_plugin != None:
             kosabiPluginOptionsMap["kosabi_jvm_abi_gen_plugin"] = kotlin_toolchain.kosabi_jvm_abi_gen_plugin
 
+        current_language_version = None
+        for arg in extra_kotlinc_arguments:
+            # If `-language-version` is defined multiple times, we use the last one, just like the compiler does
+            if isinstance(arg, str) and "-language-version" in arg:
+                current_language_version = arg.split("=")[1].strip()
+
+        if k2 == True and kotlin_toolchain.allow_k2_usage:
+            if not current_language_version or current_language_version < "2.0":
+                extra_kotlinc_arguments.append("-language-version=2.0")
+        else:  # use K1
+            if not current_language_version or current_language_version >= "2.0":
+                extra_kotlinc_arguments.append("-language-version=1.9")
+
         return struct(
             extraClassPaths = bootclasspath_entries,
             standardLibraryClassPath = kotlin_toolchain.kotlin_stdlib[JavaLibraryInfo].library_output.full_library,
             annotationProcessingClassPath = kotlin_toolchain.annotation_processing_jar[JavaLibraryInfo].library_output.full_library,
-            compilationTracerPlugin = kotlin_toolchain.compilation_tracer_plugin,
-            qpldDotslash = kotlin_toolchain.qpld_dotslash,
             jvmAbiGenPlugin = kotlin_toolchain.jvm_abi_gen_plugin,
             kotlinCompilerPlugins = {plugin: {"params": plugin_options} if plugin_options else {} for plugin, plugin_options in kotlin_compiler_plugins.items()},
             kosabiPluginOptions = struct(**kosabiPluginOptionsMap),
@@ -131,19 +143,17 @@ def create_jar_artifact_kotlincd(
             kotlinHomeLibraries = kotlin_toolchain.kotlin_home_libraries,
             jvmTarget = get_kotlinc_compatible_target(str(target_level)),
             kosabiJvmAbiGenEarlyTerminationMessagePrefix = "exception: java.lang.RuntimeException: Terminating compilation. We're done with ABI.",
-            kosabiSupportedKspProviders = kotlin_toolchain.kosabi_supported_ksp_providers,
-            shouldUseCompilationTracer = kotlin_toolchain.should_use_compilation_tracer,
             shouldUseJvmAbiGen = should_use_jvm_abi_gen,
             shouldVerifySourceOnlyAbiConstraints = actual_abi_generation_mode == AbiGenerationMode("source_only"),
             shouldGenerateAnnotationProcessingStats = True,
             extraKotlincArguments = extra_kotlinc_arguments,
-            extraNonSourceOnlyAbiKotlincArguments = ["-language-version=2.0"] if k2 else [],
-            shouldRemoveKotlinCompilerFromClassPath = True,
             depTrackerPlugin = kotlin_toolchain.track_class_usage_plugin,
+            shouldKotlincRunViaBuildToolsApi = kotlin_toolchain.kotlinc_run_via_build_tools_api,
+            shouldKotlincRunIncrementally = should_kotlinc_run_incrementally,
+            incrementalStateDir = incremental_state_dir.as_output() if incremental_state_dir else None,
+            shouldUseStandaloneKosabi = kotlin_toolchain.kosabi_standalone,
        )
 
-    kotlin_extra_params = encode_kotlin_extra_params(kotlin_compiler_plugins)
-
     compiling_deps_tset = get_compiling_deps_tset(actions, deps, additional_classpath_entries)
 
     # external javac does not support used classes
@@ -152,7 +162,8 @@ def create_jar_artifact_kotlincd(
     def encode_library_command(
             output_paths: OutputPaths,
             path_to_class_hashes: Artifact,
-            classpath_jars_tag: ArtifactTag) -> struct:
+            classpath_jars_tag: ArtifactTag,
+            incremental_state_dir: Artifact | None) -> struct:
         target_type = TargetType("library")
         base_jar_command = encode_base_jar_command(
             javac_tool,
@@ -174,6 +185,7 @@ def create_jar_artifact_kotlincd(
             extra_arguments = cmd_args(extra_arguments),
             source_only_abi_compiling_deps = [],
             track_class_usage = track_class_usage,
+            is_incremental = should_kotlinc_run_incrementally,
         )
 
         return struct(
@@ -182,7 +194,7 @@ def create_jar_artifact_kotlincd(
                 hasAnnotationProcessing = True,
             ),
             libraryJarCommand = struct(
-                kotlinExtraParams = kotlin_extra_params,
+                kotlinExtraParams = encode_kotlin_extra_params(kotlin_compiler_plugins, incremental_state_dir),
                 baseJarCommand = base_jar_command,
                 libraryJarBaseCommand = struct(
                     pathToClasses = output_paths.jar.as_output(),
@@ -221,7 +233,7 @@ def create_jar_artifact_kotlincd(
         )
         abi_params = encode_jar_params(remove_classes, output_paths, manifest_file)
         abi_command = struct(
-            kotlinExtraParams = kotlin_extra_params,
+            kotlinExtraParams = encode_kotlin_extra_params(kotlin_compiler_plugins),
             baseJarCommand = base_jar_command,
             abiJarParameters = abi_params,
         )
@@ -242,11 +254,12 @@ def create_jar_artifact_kotlincd(
             qualified_name: str,
             output_paths: OutputPaths,
             classpath_jars_tag: ArtifactTag,
-            abi_dir: [Artifact, None],
+            abi_dir: Artifact | None,
             target_type: TargetType,
-            path_to_class_hashes: [Artifact, None],
+            path_to_class_hashes: Artifact | None,
             source_only_abi_compiling_deps: list[JavaClasspathEntry] = [],
-            is_creating_subtarget: bool = False):
+            is_creating_subtarget: bool = False,
+            incremental_state_dir: Artifact | None = None):
         _unused = source_only_abi_compiling_deps
 
         proto = declare_prefixed_output(actions, actions_identifier, "jar_command.proto.json")
@@ -260,8 +273,9 @@ def create_jar_artifact_kotlincd(
             compiler = compiler,
             main_class = kotlin_toolchain.kotlincd_main_class,
             worker = kotlin_toolchain.kotlincd_worker[WorkerInfo],
-            debug_port = kotlin_toolchain.kotlincd_debug_port,
-            debug_target = kotlin_toolchain.kotlincd_debug_target,
+            target_specified_debug_port = debug_port,
+            toolchain_specified_debug_port = kotlin_toolchain.kotlincd_debug_port,
+            toolchain_specified_debug_target = kotlin_toolchain.kotlincd_debug_target,
             extra_jvm_args = kotlin_toolchain.kotlincd_jvm_args,
             extra_jvm_args_target = kotlin_toolchain.kotlincd_jvm_args_target,
         )
@@ -300,9 +314,12 @@ def create_jar_artifact_kotlincd(
             optional_dirs,
         )
 
-        args = add_output_paths_to_cmd_args(args, output_paths, path_to_class_hashes)
-
-        event_pipe_out = declare_prefixed_output(actions, actions_identifier, "events.data")
+        if incremental_state_dir:
+            args.add(
+                "--incremental-state-dir",
+                incremental_state_dir.as_output(),
+            )
+        args.add(output_paths_to_hidden_cmd_args(output_paths, path_to_class_hashes))
 
         dep_files = {}
         if not is_creating_subtarget and srcs and (kotlin_toolchain.dep_files == DepFiles("per_jar") or kotlin_toolchain.dep_files == DepFiles("per_class")) and target_type == TargetType("library") and track_class_usage:
@@ -321,25 +338,31 @@ def create_jar_artifact_kotlincd(
 
             dep_files["classpath_jars"] = classpath_jars_tag
 
+        common_params = {
+            "metadata_env_var": "ACTION_METADATA",
+            "metadata_path": "action_metadata.json",
+            "no_outputs_cleanup": True,
+        } if (incremental_state_dir != None) and ("nullsafe" != actions_identifier) else {}
         actions.run(
             args,
             env = {
                 "BUCK_CLASSPATH": compiler,
-                "BUCK_EVENT_PIPE": event_pipe_out.as_output(),
                 "JAVACD_ABSOLUTE_PATHS_ARE_RELATIVE_TO_CWD": "1",
             },
             category = "{}kotlincd_jar".format(category_prefix),
             identifier = actions_identifier,
             dep_files = dep_files,
+            allow_dep_file_cache_upload = False,
             exe = exe,
             local_only = local_only,
             low_pass_filter = False,
             weight = 2,
             error_handler = kotlin_toolchain.kotlin_error_handler,
+            **common_params
        )
 
     library_classpath_jars_tag = actions.artifact_tag()
-    command = encode_library_command(output_paths, path_to_class_hashes_out, library_classpath_jars_tag)
+    command = encode_library_command(output_paths, path_to_class_hashes_out, library_classpath_jars_tag, incremental_state_dir)
     define_kotlincd_action(
         category_prefix = "",
         actions_identifier = actions_identifier,
@@ -351,9 +374,10 @@ def create_jar_artifact_kotlincd(
         target_type = TargetType("library"),
         path_to_class_hashes = path_to_class_hashes_out,
         is_creating_subtarget = is_creating_subtarget,
+        incremental_state_dir = incremental_state_dir,
     )
 
-    final_jar = prepare_final_jar(
+    final_jar_output = prepare_final_jar(
         actions = actions,
         actions_identifier = actions_identifier,
         output = None,
@@ -373,7 +397,7 @@ def create_jar_artifact_kotlincd(
             additional_compiled_srcs = None,
             is_building_android_binary = is_building_android_binary,
             class_abi_generator = java_toolchain.class_abi_generator,
-            final_jar = final_jar,
+            final_jar = final_jar_output.final_jar,
             compiling_deps_tset = compiling_deps_tset,
             source_only_abi_deps = source_only_abi_deps,
             class_abi_jar = class_abi_jar,
@@ -381,18 +405,25 @@ def create_jar_artifact_kotlincd(
             encode_abi_command = encode_abi_command,
             define_action = define_kotlincd_action,
         )
+        abi_jar_snapshot = generate_java_classpath_snapshot(actions, java_toolchain.cp_snapshot_generator, classpath_abi, actions_identifier)
         return make_compile_outputs(
-            full_library = final_jar,
+            full_library = final_jar_output.final_jar,
+            preprocessed_library = final_jar_output.preprocessed_jar,
             class_abi = class_abi,
             source_only_abi = source_only_abi,
             classpath_abi = classpath_abi,
             classpath_abi_dir = classpath_abi_dir,
             required_for_source_only_abi = required_for_source_only_abi,
             annotation_processor_output = output_paths.annotations,
+            incremental_state_dir = incremental_state_dir,
+            abi_jar_snapshot = abi_jar_snapshot,
        )
     else:
+        full_jar_snapshot = generate_java_classpath_snapshot(actions, java_toolchain.cp_snapshot_generator, final_jar_output.final_jar, actions_identifier)
         return make_compile_outputs(
-            full_library = final_jar,
+            full_library = final_jar_output.final_jar,
+            preprocessed_library = final_jar_output.preprocessed_jar,
             required_for_source_only_abi = required_for_source_only_abi,
             annotation_processor_output = output_paths.annotations,
+            abi_jar_snapshot = full_jar_snapshot,
        )
diff --git a/prelude/kotlin/tools/compile_kotlin/BUCK.v2 b/prelude/kotlin/tools/compile_kotlin/BUCK.v2
index 0033a1bd059..5ae64be14ef 100644
--- a/prelude/kotlin/tools/compile_kotlin/BUCK.v2
+++ b/prelude/kotlin/tools/compile_kotlin/BUCK.v2
@@ -1,3 +1,9 @@
+load("@prelude//utils:source_listing.bzl", "source_listing")
+
+oncall("build_infra")
+
+source_listing()
+
 prelude = native
 
 prelude.python_bootstrap_binary(
diff --git a/prelude/kotlin/tools/kapt_base64_encoder/BUCK.v2 b/prelude/kotlin/tools/kapt_base64_encoder/BUCK.v2
index bf49427e7f5..e44d3e6263e 100644
--- a/prelude/kotlin/tools/kapt_base64_encoder/BUCK.v2
+++ b/prelude/kotlin/tools/kapt_base64_encoder/BUCK.v2
@@ -1,4 +1,9 @@
 load("@prelude//kotlin/tools:defs.bzl", "java_bootstrap_binary", "java_bootstrap_library")
+load("@prelude//utils:source_listing.bzl", "source_listing")
+
+oncall("build_infra")
+
+source_listing()
 
 java_bootstrap_library(
     name = "kapt_base64_encoder_lib",
diff --git a/prelude/kotlin/tools/kapt_base64_encoder/com/facebook/kapt/KaptBase64Encoder.java b/prelude/kotlin/tools/kapt_base64_encoder/com/facebook/kapt/KaptBase64Encoder.java
index 5da2b5fac50..d6840bdeb2e 100644
--- a/prelude/kotlin/tools/kapt_base64_encoder/com/facebook/kapt/KaptBase64Encoder.java
+++ b/prelude/kotlin/tools/kapt_base64_encoder/com/facebook/kapt/KaptBase64Encoder.java
@@ -1,4 +1,11 @@
-// (c) Meta Platforms, Inc. and affiliates. Confidential and proprietary.
+/*
+ * Copyright (c) Meta Platforms, Inc. and affiliates.
+ *
+ * This source code is licensed under both the MIT license found in the
+ * LICENSE-MIT file in the root directory of this source tree and the Apache
+ * License, Version 2.0 found in the LICENSE-APACHE file in the root directory
+ * of this source tree.
+ */
 
 package com.facebook.kapt;
 
diff --git a/prelude/linking/link_groups.bzl b/prelude/linking/link_groups.bzl
index 6d6bdbef3c0..a3fb2e1c59c 100644
--- a/prelude/linking/link_groups.bzl
+++ b/prelude/linking/link_groups.bzl
@@ -12,7 +12,10 @@ load(
 load(
     ":link_info.bzl",
     "LinkInfos",
-    "LinkedObject",
+)
+load(
+    ":shared_libraries.bzl",
+    "SharedLibraries",
 )
 
 # Information about a linkable node which explicitly sets `link_group`.
@@ -20,7 +23,7 @@ LinkGroupLib = record(
     # The label of the owning target (if any).
     label = field([Label, None], None),
     # The shared libs to package for this link group.
-    shared_libs = field(dict[str, LinkedObject]),
+    shared_libs = field(SharedLibraries),
     # The link info to link against this link group.
shared_link_infos = field(LinkInfos), ) @@ -48,7 +51,7 @@ def gather_link_group_libs( def merge_link_group_lib_info( label: [Label, None] = None, name: [str, None] = None, - shared_libs: [dict[str, LinkedObject], None] = None, + shared_libs: [SharedLibraries, None] = None, shared_link_infos: [LinkInfos, None] = None, deps: list[Dependency] = [], children: list[LinkGroupLibInfo] = []) -> LinkGroupLibInfo: diff --git a/prelude/linking/link_info.bzl b/prelude/linking/link_info.bzl index 856e4f81186..b86f80fae91 100644 --- a/prelude/linking/link_info.bzl +++ b/prelude/linking/link_info.bzl @@ -10,18 +10,19 @@ load( "ArtifactTSet", "make_artifact_tset", ) -load("@prelude//cxx:cxx_toolchain_types.bzl", "PicBehavior") +load( + "@prelude//cxx:cxx_toolchain_types.bzl", + "LinkerType", + "PicBehavior", +) load( "@prelude//cxx:linker.bzl", "get_link_whole_args", "get_no_as_needed_shared_libs_flags", "get_objects_as_library_args", ) +load("@prelude//linking:types.bzl", "Linkage") load("@prelude//utils:arglike.bzl", "ArgLike") -load( - "@prelude//utils:utils.bzl", - "flatten", -) # Represents an archive (.a file) Archive = record( @@ -74,23 +75,13 @@ def default_output_style_for_link_strategy(link_strategy: LinkStrategy) -> LibOu return LibOutputStyle("pic_archive") return LibOutputStyle("shared_lib") -# Ways a library can request to be linked (e.g. usually specific via a rule -# param like `preferred_linkage`. The actual link style used for a library is -# usually determined by a combination of this and the link style being exported -# via a provider. -Linkage = enum( - "static", - "shared", - "any", -) - # An archive. ArchiveLinkable = record( # Artifact in the .a format from ar archive = field(Archive), # If a bitcode bundle was created for this artifact it will be present here - bitcode_bundle = field([Artifact, None], None), - linker_type = field(str), + bitcode_bundle = field(Artifact | None, None), + linker_type = field(LinkerType), link_whole = field(bool, False), # Indicates if this archive may contain LTO bit code. Can be set to `False` # to e.g. tell dist LTO handling that a potentially expensive archive doesn't @@ -108,8 +99,8 @@ SharedLibLinkable = record( ObjectsLinkable = record( objects = field([list[Artifact], None], None), # Any of the objects that are in bitcode format - bitcode_bundle = field([Artifact, None], None), - linker_type = field(str), + bitcode_bundle = field(Artifact | None, None), + linker_type = field(LinkerType), link_whole = field(bool, False), ) @@ -143,7 +134,14 @@ SwiftRuntimeLinkable = record( runtime_required = field(bool, False), ) -LinkableTypes = [ArchiveLinkable, SharedLibLinkable, ObjectsLinkable, FrameworksLinkable, SwiftRuntimeLinkable, SwiftmoduleLinkable] +LinkableTypes = [ + ArchiveLinkable, + SharedLibLinkable, + ObjectsLinkable, + FrameworksLinkable, + SwiftRuntimeLinkable, + SwiftmoduleLinkable, +] LinkerFlags = record( flags = field(list[typing.Any], []), @@ -157,6 +155,7 @@ LinkInfo = record( # An informative name for this LinkInfo. This may be used in user messages # or when constructing intermediate output paths and does not need to be unique. name = field([str, None], None), + dist_thin_lto_codegen_flags = field(list[typing.Any], []), # Opaque cmd_arg-likes to be added pre/post this item on a linker command line. 
pre_flags = field(list[typing.Any], []), post_flags = field(list[typing.Any], []), @@ -223,38 +222,55 @@ def wrap_link_info( external_debug_info = inner.external_debug_info, ) +# Returns true if the command line argument representation of this linkable +# could be passed within a filelist. +def _is_linkable_included_in_filelist(linkable: LinkableTypes) -> bool: + if isinstance(linkable, ArchiveLinkable): + # Link-whole archives don't appear in the filelist, but are passed directly to the linker + # with a -force-load (MachO) or -whole-archive (ELF) flag. Regular archives do appear in the filelist. + return not linkable.link_whole + elif isinstance(linkable, SharedLibLinkable) or \ + isinstance(linkable, FrameworksLinkable) or \ + isinstance(linkable, SwiftRuntimeLinkable) or \ + isinstance(linkable, SwiftmoduleLinkable): + # These are all passed directly via various command line flags, not via a filelist. + return False + elif isinstance(linkable, ObjectsLinkable): + # Object files always appear in the filelist. + return True + else: + fail("Encountered unhandled filelist-like linkable {}".format(str(linkable))) + # Adds appropriate args representing `linkable` to `args` def append_linkable_args(args: cmd_args, linkable: LinkableTypes): if isinstance(linkable, ArchiveLinkable): if linkable.link_whole: args.add(get_link_whole_args(linkable.linker_type, [linkable.archive.artifact])) - elif linkable.linker_type == "darwin": - pass else: args.add(linkable.archive.artifact) # When using thin archives, object files are implicitly used as inputs # to the link, so make sure track them as inputs so that they're # materialized/tracked properly. - args.add(cmd_args().hidden(linkable.archive.external_objects)) + args.add(cmd_args(hidden = linkable.archive.external_objects)) elif isinstance(linkable, SharedLibLinkable): if linkable.link_without_soname: - args.add(cmd_args(linkable.lib, format = "-L{}").parent()) + args.add(cmd_args(linkable.lib, format = "-L{}", parent = 1)) args.add("-l" + linkable.lib.basename.removeprefix("lib").removesuffix(linkable.lib.extension)) else: args.add(linkable.lib) elif isinstance(linkable, ObjectsLinkable): - # We depend on just the filelist for darwin linker and don't add the normal args - if linkable.linker_type != "darwin": - # We need to export every symbol when link groups are used, but enabling - # --whole-archive with --start-lib is undefined behavior in gnu linkers: - # https://reviews.llvm.org/D120443. We need to export symbols from every - # linkable in the link_info - if not linkable.link_whole: - args.add(get_objects_as_library_args(linkable.linker_type, linkable.objects)) - else: - args.add(linkable.objects) - elif isinstance(linkable, FrameworksLinkable) or isinstance(linkable, SwiftRuntimeLinkable) or isinstance(linkable, SwiftmoduleLinkable): + # We need to export every symbol when link groups are used, but enabling + # --whole-archive with --start-lib is undefined behavior in gnu linkers: + # https://reviews.llvm.org/D120443. We need to export symbols from every + # linkable in the link_info + if not linkable.link_whole: + args.add(get_objects_as_library_args(linkable.linker_type, linkable.objects)) + else: + args.add(linkable.objects) + elif isinstance(linkable, FrameworksLinkable) or \ + isinstance(linkable, SwiftRuntimeLinkable) or \ + isinstance(linkable, SwiftmoduleLinkable): # These flags are handled separately so they can be deduped.
# # We've seen in apps with larger dependency graphs that failing @@ -263,42 +279,33 @@ def append_linkable_args(args: cmd_args, linkable: LinkableTypes): else: fail("Encountered unhandled linkable {}".format(str(linkable))) -def link_info_to_args(value: LinkInfo) -> cmd_args: - args = cmd_args(value.pre_flags) - for linkable in value.linkables: - append_linkable_args(args, linkable) - if False: - # TODO(nga): `post_flags` is never `None`. - def unknown(): - pass - - value = unknown() - if value.post_flags != None: - args.add(value.post_flags) - return args - -# List of inputs to pass to the darwin linker via the `-filelist` param. -# TODO(agallagher): It might be nicer to leave these inlined in the args -# above and extract them at link time via reflection. This way we'd hide -# platform-specific details from this level. -# NOTE(agallagher): Using filelist out-of-band means objects/archives get -# linked out of order of their corresponding flags. -def link_info_filelist(value: LinkInfo) -> list[Artifact]: - filelists = [] +LinkInfoArgumentFilter = enum( + "all", + "filelist_only", + "excluding_filelist", +) + +def link_info_to_args(value: LinkInfo, argument_type_filter: LinkInfoArgumentFilter = LinkInfoArgumentFilter("all")) -> cmd_args: + pre_flags = cmd_args() + post_flags = cmd_args() + if argument_type_filter == LinkInfoArgumentFilter("all") or argument_type_filter == LinkInfoArgumentFilter("excluding_filelist"): + pre_flags.add(value.pre_flags) + post_flags.add(value.post_flags) + + flags = cmd_args() for linkable in value.linkables: - if isinstance(linkable, ArchiveLinkable): - if linkable.linker_type == "darwin" and not linkable.link_whole: - filelists.append(linkable.archive.artifact) - elif isinstance(linkable, SharedLibLinkable): - pass - elif isinstance(linkable, ObjectsLinkable): - if linkable.linker_type == "darwin": - filelists += linkable.objects - elif isinstance(linkable, FrameworksLinkable) or isinstance(linkable, SwiftRuntimeLinkable) or isinstance(linkable, SwiftmoduleLinkable): - pass - else: - fail("Encountered unhandled linkable {}".format(str(linkable))) - return filelists + if argument_type_filter == LinkInfoArgumentFilter("all"): + append_linkable_args(flags, linkable) + elif argument_type_filter == LinkInfoArgumentFilter("filelist_only") and _is_linkable_included_in_filelist(linkable): + append_linkable_args(flags, linkable) + elif argument_type_filter == LinkInfoArgumentFilter("excluding_filelist") and not _is_linkable_included_in_filelist(linkable): + append_linkable_args(flags, linkable) + + result = cmd_args() + result.add(pre_flags) + result.add(flags) + result.add(post_flags) + return result # Encapsulate all `LinkInfo`s provided by a given rule's link style. # @@ -308,45 +315,47 @@ def link_info_filelist(value: LinkInfo) -> list[Artifact]: LinkInfos = record( # Link info to use by default. default = field(LinkInfo), + # Link info for objects compiled with extra optimizations (EXPERIMENTAL) + optimized = field([LinkInfo, None], None), # Link info stripped of debug symbols.
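Taken together, `_is_linkable_included_in_filelist` and the new `argument_type_filter` parameter partition one link line into two complementary halves: objects and regular archives land in the filelist, everything else stays as direct flags. A Darwin link could then consume the two projections like this (a sketch only; `infos`, `actions`, and `linker` are stand-in names, and `with_inputs = True` is assumed to keep the filelist's inputs attached to the written file):

    filelist_content = infos.project_as_args("default_filelist", ordering = "preorder")
    other_args = infos.project_as_args("default_excluding_filelist", ordering = "preorder")
    filelist = actions.write("link.filelist", filelist_content, with_inputs = True)
    link_cmd = cmd_args(linker, "-filelist", filelist, other_args)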
stripped = field([LinkInfo, None], None), ) def _link_info_default_args(infos: LinkInfos): info = infos.default - return link_info_to_args(info) - -def _link_info_default_shared_link_args(infos: LinkInfos): - info = infos.default - return link_info_to_args(info) - -def _link_info_stripped_args(infos: LinkInfos): - info = infos.stripped or infos.default - return link_info_to_args(info) + return link_info_to_args(info, argument_type_filter = LinkInfoArgumentFilter("all")) -def _link_info_stripped_shared_link_args(infos: LinkInfos): +def _link_info_stripped_link_args(infos: LinkInfos): info = infos.stripped or infos.default - return link_info_to_args(info) + return link_info_to_args(info, argument_type_filter = LinkInfoArgumentFilter("all")) def _link_info_default_filelist(infos: LinkInfos): info = infos.default - return link_info_filelist(info) + return link_info_to_args(info, argument_type_filter = LinkInfoArgumentFilter("filelist_only")) def _link_info_stripped_filelist(infos: LinkInfos): info = infos.stripped or infos.default - return link_info_filelist(info) + return link_info_to_args(info, argument_type_filter = LinkInfoArgumentFilter("filelist_only")) + +def _link_info_default_excluding_filelist_args(infos: LinkInfos): + info = infos.default + return link_info_to_args(info, argument_type_filter = LinkInfoArgumentFilter("excluding_filelist")) + +def _link_info_stripped_excluding_filelist_args(infos: LinkInfos): + info = infos.stripped or infos.default + return link_info_to_args(info, argument_type_filter = LinkInfoArgumentFilter("excluding_filelist")) def _link_info_has_default_filelist(children: list[bool], infos: [LinkInfos, None]) -> bool: if infos: info = infos.default - if link_info_filelist(info): + if len(link_info_to_args(info, argument_type_filter = LinkInfoArgumentFilter("filelist_only")).inputs): return True return any(children) def _link_info_has_stripped_filelist(children: list[bool], infos: [LinkInfos, None]) -> bool: if infos: info = infos.stripped or infos.default - if link_info_filelist(info): + if len(link_info_to_args(info, argument_type_filter = LinkInfoArgumentFilter("filelist_only")).inputs): return True return any(children) @@ -354,11 +363,11 @@ def _link_info_has_stripped_filelist(children: list[bool], infos: [LinkInfos, No LinkInfosTSet = transitive_set( args_projections = { "default": _link_info_default_args, + "default_excluding_filelist": _link_info_default_excluding_filelist_args, "default_filelist": _link_info_default_filelist, - "default_shared": _link_info_default_shared_link_args, - "stripped": _link_info_stripped_args, + "stripped": _link_info_stripped_link_args, + "stripped_excluding_filelist": _link_info_stripped_excluding_filelist_args, "stripped_filelist": _link_info_stripped_filelist, - "stripped_shared": _link_info_stripped_shared_link_args, }, reductions = { "has_default_filelist": _link_info_has_default_filelist, @@ -390,11 +399,11 @@ LinkArgs = record( LinkedObject = record( output = field([Artifact, Promise]), # The combined bitcode from this linked object and any static libraries - bitcode_bundle = field([Artifact, None], None), + bitcode_bundle = field(Artifact | None, None), # the generated linked output before running stripping(and bolt). unstripped_output = field(Artifact), # the generated linked output before running bolt, may be None if bolt is not used. - prebolt_output = field([Artifact, None], None), + prebolt_output = field(Artifact | None, None), # The LinkArgs used to produce this LinkedObject. 
This can be useful for debugging or # for downstream rules to reproduce the shared library with some modifications (for example # android relinker will link again with an added version script argument). @@ -402,32 +411,37 @@ LinkedObject = record( # A linked object (binary/shared library) may have an associated dwp file with # its corresponding DWARF debug info. # May be None when Split DWARF is disabled or for some types of synthetic link objects. - dwp = field([Artifact, None], None), + dwp = field(Artifact | None, None), # Additional dirs or paths that contain debug info referenced by the linked # object (e.g. split dwarf files or PDB file). external_debug_info = field(ArtifactTSet, ArtifactTSet()), # This argsfile is generated in the `cxx_link` step and contains a list of arguments # passed to the linker. It is being exposed as a sub-target for debugging purposes. - linker_argsfile = field([Artifact, None], None), + linker_argsfile = field(Artifact | None, None), # The filelist is generated in the `cxx_link` step and contains a list of # object files (static libs or plain object files) passed to the linker. # It is being exposed for debugging purposes. Only present when a Darwin # linker is used. - linker_filelist = field([Artifact, None], None), + linker_filelist = field(Artifact | None, None), # The linker command as generated by `cxx_link`. Exposed for debugging purposes only. # Not present for DistLTO scenarios. linker_command = field([cmd_args, None], None), # This sub-target is only available for distributed thinLTO builds. - index_argsfile = field([Artifact, None], None), + index_argsfile = field(Artifact | None, None), + # This sub-target is only available for distributed thinLTO builds. + dist_thin_lto_codegen_argsfile = field([Artifact, None], None), + # This sub-target is only available for distributed thinLTO builds. This is similar to + # index_argsfile, but only includes flags that can be determined at analysis time, no input files. + dist_thin_lto_index_argsfile = field([Artifact, None], None), # Import library for linking with DLL on Windows. # If not on Windows it's always None. - import_library = field([Artifact, None], None), + import_library = field(Artifact | None, None), # A linked object (binary/shared library) may have an associated PDB file with # its corresponding Windows debug info. # If not on Windows it's always None. - pdb = field([Artifact, None], None), + pdb = field(Artifact | None, None), # Split-debug info generated by the link. - split_debug_output = field([Artifact, None], None), + split_debug_output = field(Artifact | None, None), ) # A map of native linkable infos from transitive dependencies for each LinkStrategy. @@ -536,6 +550,7 @@ def create_merged_link_info( value = dep_info._external_debug_info.get(link_strategy) if value: external_debug_info_children.append(value) + framework_linkables.append(dep_info.frameworks[link_strategy]) swiftmodule_linkables.append(dep_info.swiftmodules[link_strategy]) swift_runtime_linkables.append(dep_info.swift_runtime[link_strategy]) @@ -616,7 +631,8 @@ def create_merged_link_info_for_propagation( def get_link_info( infos: LinkInfos, - prefer_stripped: bool = False) -> LinkInfo: + prefer_stripped: bool = False, + prefer_optimized: bool = False) -> LinkInfo: """ Helper for getting a `LinkInfo` out of a `LinkInfos`. """ @@ -624,22 +640,19 @@ def get_link_info( # When requested, prefer using pre-stripped link info. 
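The selection below applies a fixed precedence: stripped (when requested and present) wins over optimized, which wins over default. Illustratively, given `infos = LinkInfos(default = d, optimized = o, stripped = s)`:

    # get_link_info(infos, prefer_stripped = True, prefer_optimized = True)  -> s
    # get_link_info(infos, prefer_optimized = True)                          -> o
    # get_link_info(infos)                                                   -> d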
if prefer_stripped and infos.stripped != None: return infos.stripped + if prefer_optimized and infos.optimized: + return infos.optimized return infos.default -def unpack_link_args(args: LinkArgs, is_shared: [bool, None] = None, link_ordering: [LinkOrdering, None] = None) -> ArgLike: +def unpack_link_args(args: LinkArgs, link_ordering: [LinkOrdering, None] = None) -> ArgLike: if args.tset != None: ordering = link_ordering.value if link_ordering else "preorder" tset = args.tset.infos - if is_shared: - if args.tset.prefer_stripped: - return tset.project_as_args("stripped_shared", ordering = ordering) - return tset.project_as_args("default_shared", ordering = ordering) - else: - if args.tset.prefer_stripped: - return tset.project_as_args("stripped", ordering = ordering) - return tset.project_as_args("default", ordering = ordering) + if args.tset.prefer_stripped: + return tset.project_as_args("stripped", ordering = ordering) + return tset.project_as_args("default", ordering = ordering) if args.infos != None: return cmd_args([link_info_to_args(info) for info in args.infos]) @@ -658,20 +671,37 @@ def unpack_link_args_filelist(args: LinkArgs) -> [ArgLike, None]: return tset.project_as_args("stripped_filelist" if stripped else "default_filelist") if args.infos != None: - filelist = flatten([link_info_filelist(info) for info in args.infos]) - if not filelist: + result_args = cmd_args() + for info in args.infos: + result_args.add(link_info_to_args(info, argument_type_filter = LinkInfoArgumentFilter("filelist_only"))) + + if not len(result_args.inputs): return None - # Actually create cmd_args so the API is consistent between the 2 branches. - args = cmd_args() - args.add(filelist) - return args + return result_args if args.flags != None: return None fail("Unpacked invalid empty link args") +def unpack_link_args_excluding_filelist(args: LinkArgs, link_ordering: [LinkOrdering, None] = None) -> ArgLike: + if args.tset != None: + ordering = link_ordering.value if link_ordering else "preorder" + + tset = args.tset.infos + if args.tset.prefer_stripped: + return tset.project_as_args("stripped_excluding_filelist", ordering = ordering) + return tset.project_as_args("default_excluding_filelist", ordering = ordering) + + if args.infos != None: + return cmd_args([link_info_to_args(info, LinkInfoArgumentFilter("excluding_filelist")) for info in args.infos]) + + if args.flags != None: + return args.flags + + fail("Unpacked invalid empty link args") + def unpack_external_debug_info(actions: AnalysisActions, args: LinkArgs) -> ArtifactTSet: if args.tset != None: if args.tset.prefer_stripped: @@ -693,7 +723,7 @@ def map_to_link_infos(links: list[LinkArgs]) -> list[LinkInfo]: res = [] def append(v): - if v.pre_flags or v.post_flags or v.linkables: + if v.pre_flags or v.post_flags or v.dist_thin_lto_codegen_flags or v.linkables: res.append(v) for link in links: @@ -709,7 +739,7 @@ def map_to_link_infos(links: list[LinkArgs]) -> list[LinkInfo]: append(link) continue if link.flags != None: - append(LinkInfo(pre_flags = link.flags)) + append(LinkInfo(pre_flags = [link.flags])) continue fail("Unpacked invalid empty link args") return res @@ -879,13 +909,13 @@ def merge_swiftmodule_linkables(ctx: AnalysisContext, linkables: list[[Swiftmodu ], )) -def wrap_with_no_as_needed_shared_libs_flags(linker_type: str, link_info: LinkInfo) -> LinkInfo: +def wrap_with_no_as_needed_shared_libs_flags(linker_type: LinkerType, link_info: LinkInfo) -> LinkInfo: """ Wrap link info in args used to prevent linkers from dropping unused shared 
library dependencies from the e.g. DT_NEEDED tags of the link. """ - if linker_type == "gnu": + if linker_type == LinkerType("gnu"): return wrap_link_info( inner = link_info, pre_flags = ( @@ -895,7 +925,7 @@ def wrap_with_no_as_needed_shared_libs_flags(linker_type: str, link_info: LinkIn post_flags = ["-Wl,--pop-state"], ) - if linker_type == "darwin": + if linker_type == LinkerType("darwin"): return link_info fail("Linker type {} not supported".format(linker_type)) @@ -907,7 +937,9 @@ LinkCommandDebugOutput = record( filename = str, command = ArgLike, argsfile = Artifact, - filelist = [Artifact, None], + filelist = Artifact | None, + dist_thin_lto_codegen_argsfile = Artifact | None, + dist_thin_lto_index_argsfile = Artifact | None, ) # NB: Debug output is _not_ transitive over deps, so tsets are not used here. @@ -922,31 +954,52 @@ UnstrippedLinkOutputInfo = provider(fields = { }) def make_link_command_debug_output(linked_object: LinkedObject) -> [LinkCommandDebugOutput, None]: - if not linked_object.output or not linked_object.linker_command or not linked_object.linker_argsfile: + local_link_debug_info_present = linked_object.output and linked_object.linker_command and linked_object.linker_argsfile + distributed_link_debug_info_present = linked_object.dist_thin_lto_index_argsfile and linked_object.dist_thin_lto_codegen_argsfile + if not local_link_debug_info_present and not distributed_link_debug_info_present: return None return LinkCommandDebugOutput( filename = linked_object.output.short_path, command = linked_object.linker_command, argsfile = linked_object.linker_argsfile, filelist = linked_object.linker_filelist, + dist_thin_lto_index_argsfile = linked_object.dist_thin_lto_index_argsfile, + dist_thin_lto_codegen_argsfile = linked_object.dist_thin_lto_codegen_argsfile, ) # Given a list of `LinkCommandDebugOutput`, it will produce a JSON info file. # The JSON info file will contain entries for each link command. 
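Concretely, each entry in the JSON file takes one of two shapes, depending on the link mode (filenames illustrative; the other values are the corresponding artifacts and args):

    # distributed thin-LTO link:
    #   {"filename": "libfoo.so", "dist_thin_lto_codegen_argsfile": ..., "dist_thin_lto_index_argsfile": ...}
    # local link:
    #   {"filename": "libfoo.so", "command": ..., "argsfile": ...}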
In addition, # it will _not_ materialize any inputs to the link command except: +# +# For local thin-LTO: # - linker argfile # - linker filelist (if present - only applicable to Darwin linkers) +# +# For distributed thin-LTO: +# - thin-link argsfile (without inputs just flags) +# - codegen argsfile (without inputs just flags) def make_link_command_debug_output_json_info(ctx: AnalysisContext, debug_outputs: list[LinkCommandDebugOutput]) -> Artifact: json_info = [] associated_artifacts = [] for debug_output in debug_outputs: - json_info.append({ - "command": debug_output.command, - "filename": debug_output.filename, - }) - - # Ensure all argsfile and filelists get materialized, as those are needed for debugging - associated_artifacts.extend(filter(None, [debug_output.argsfile, debug_output.filelist])) + is_distributed_link = debug_output.dist_thin_lto_index_argsfile and debug_output.dist_thin_lto_codegen_argsfile + if is_distributed_link: + json_info.append({ + "dist_thin_lto_codegen_argsfile": debug_output.dist_thin_lto_codegen_argsfile, + "dist_thin_lto_index_argsfile": debug_output.dist_thin_lto_index_argsfile, + "filename": debug_output.filename, + }) + + associated_artifacts.extend([debug_output.dist_thin_lto_codegen_argsfile, debug_output.dist_thin_lto_index_argsfile]) + else: + json_info.append({ + "argsfile": debug_output.argsfile, + "command": debug_output.command, + "filename": debug_output.filename, + }) + + # Ensure all argsfile and filelists get materialized, as those are needed for debugging + associated_artifacts.extend(filter(None, [debug_output.argsfile, debug_output.filelist])) # Explicitly drop all inputs by using `with_inputs = False`, we don't want # to materialize all inputs to the link actions (which includes all object files diff --git a/prelude/linking/linkable_graph.bzl b/prelude/linking/linkable_graph.bzl index 1f73160ed4a..528d3ae022e 100644 --- a/prelude/linking/linkable_graph.bzl +++ b/prelude/linking/linkable_graph.bzl @@ -7,11 +7,20 @@ load("@prelude//cxx:cxx_toolchain_types.bzl", "PicBehavior") load("@prelude//cxx:headers.bzl", "CPrecompiledHeaderInfo") +load("@prelude//cxx:platform.bzl", "cxx_by_platform") + +# TODO(mattpayne): Add this back once the type is supported by dependency mgmt +# load("@prelude//cxx:shared_library_interface.bzl", "SharedInterfaceInfo") +load("@prelude//linking:types.bzl", "Linkage") load("@prelude//python:python.bzl", "PythonLibraryInfo") load("@prelude//utils:expect.bzl", "expect") load( "@prelude//utils:graph_utils.bzl", - "breadth_first_traversal_by", + "depth_first_traversal_by", +) +load( + "@prelude//utils:utils.bzl", + "flatten", ) load( ":link_info.bzl", @@ -19,14 +28,16 @@ load( "LinkInfo", # @unused Used as a type "LinkInfos", "LinkStrategy", - "Linkage", - "LinkedObject", "LinkerFlags", "MergedLinkInfo", "get_lib_output_style", "get_output_styles_for_linkage", _get_link_info = "get_link_info", ) +load( + ":shared_libraries.bzl", + "SharedLibraries", +) # A provider with information used to link a rule into a shared library. 
# Potential omnibus roots must provide this so that omnibus can link them @@ -34,6 +45,7 @@ load( LinkableRootInfo = provider( # @unsorted-dict-items fields = { + "label": provider_field(Label), "link_infos": provider_field(typing.Any, default = None), # LinkInfos "name": provider_field(typing.Any, default = None), # [str, None] "deps": provider_field(typing.Any, default = None), # ["label"] @@ -47,6 +59,7 @@ LinkableRootInfo = provider( ############################################################################### _DisallowConstruction = record() +_TargetSourceType = Artifact | str | tuple LinkableNode = record( # Attribute labels on the target. labels = field(list[str], []), @@ -63,6 +76,10 @@ LinkableNode = record( # deps and their (transitive) exported deps. This helps keep link lines smaller # and produces more efficient libs (for example, DT_NEEDED stays a manageable size). exported_deps = field(list[Label], []), + + # List of both deps and exported deps. We traverse the linkable graph many times, + # and preallocating this list saves RAM during analysis. + all_deps = field(list[Label], []), # Link infos for all supported lib output styles supported by this node. This should have a value # for every output_style supported by the preferred linkage. link_infos = field(dict[LibOutputStyle, LinkInfos], {}), @@ -74,7 +91,7 @@ LinkableNode = record( # Shared libraries provided by this target. Used if this target is # excluded. - shared_libs = field(dict[str, LinkedObject], {}), + shared_libs = field(SharedLibraries, SharedLibraries(libraries = [])), # The soname this node would use in default link strategies. May be used by non-default # link strategies as a lib's soname. @@ -84,12 +101,21 @@ # as an asset in android apks. can_be_asset = field(bool), + # Collected target sources from the target. + srcs = field(list[_TargetSourceType]), + # Whether the node should appear in the android mergemap (which provides information about the original # soname->final merged lib mapping) include_in_android_mergemap = field(bool), # Don't follow dependents on this node even if has preferred linkage static ignore_force_static_follows_dependents = field(bool), + # Shared interface provider for this node. + # TODO(mattpayne): This type is incompatible with Autodeps. + # Once the pyautotargets service is rolled out, we can change it back. + # It should be SharedInterfaceInfo | None + shared_interface_info = field(typing.Any), + # Only allow constructing within this file.
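The `all_deps` field above is consumed by graph traversals such as `get_transitive_deps` further down: instead of concatenating `deps + exported_deps` on every visited node, the traversal callback becomes a single field read:

    def find_transitive_deps(node: Label):
        return link_infos[node].all_deps

    all_deps = depth_first_traversal_by(link_infos, roots, find_transitive_deps)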
_private = _DisallowConstruction, ) @@ -135,6 +161,14 @@ def _get_required_outputs_for_linkage(linkage: Linkage) -> list[LibOutputStyle]: return get_output_styles_for_linkage(linkage) +def _get_target_sources(ctx: AnalysisContext) -> list[_TargetSourceType]: + srcs = [] + if hasattr(ctx.attrs, "srcs"): + srcs.extend(ctx.attrs.srcs) + if hasattr(ctx.attrs, "platform_srcs"): + srcs.extend(flatten(cxx_by_platform(ctx, ctx.attrs.platform_srcs))) + return srcs + def create_linkable_node( ctx: AnalysisContext, default_soname: str | None, @@ -143,11 +177,15 @@ def create_linkable_node( deps: list[Dependency | LinkableGraph] = [], exported_deps: list[Dependency | LinkableGraph] = [], link_infos: dict[LibOutputStyle, LinkInfos] = {}, - shared_libs: dict[str, LinkedObject] = {}, + shared_libs: SharedLibraries = SharedLibraries(libraries = []), can_be_asset: bool = True, include_in_android_mergemap: bool = True, linker_flags: [LinkerFlags, None] = None, - ignore_force_static_follows_dependents: bool = False) -> LinkableNode: + ignore_force_static_follows_dependents: bool = False, + # TODO(mattpayne): This type is incompatible with Autodeps. + # Once the pyautotargets service is rolled out, we can change it back. + # It should be SharedInterfaceInfo | None + shared_interface_info: typing.Any = None) -> LinkableNode: for output_style in _get_required_outputs_for_linkage(preferred_linkage): expect( output_style in link_infos, @@ -155,19 +193,24 @@ def create_linkable_node( ) if not linker_flags: linker_flags = LinkerFlags() + deps = linkable_deps(deps) + exported_deps = linkable_deps(exported_deps) return LinkableNode( labels = ctx.attrs.labels, preferred_linkage = preferred_linkage, default_link_strategy = default_link_strategy, - deps = linkable_deps(deps), - exported_deps = linkable_deps(exported_deps), + deps = deps, + exported_deps = exported_deps, + all_deps = deps + exported_deps, link_infos = link_infos, shared_libs = shared_libs, can_be_asset = can_be_asset, + srcs = _get_target_sources(ctx), include_in_android_mergemap = include_in_android_mergemap, default_soname = default_soname, linker_flags = linker_flags, ignore_force_static_follows_dependents = ignore_force_static_follows_dependents, + shared_interface_info = shared_interface_info, _private = _DisallowConstruction(), ) @@ -194,7 +237,7 @@ def create_linkable_graph( deps: list[[LinkableGraph, Dependency]] = []) -> LinkableGraph: graph_deps = [] for d in deps: - if eval_type(LinkableGraph.type).matches(d): + if isinstance(d, LinkableGraph): graph_deps.append(d) else: graph = d.get(LinkableGraph) @@ -238,7 +281,7 @@ def linkable_deps(deps: list[Dependency | LinkableGraph]) -> list[Label]: labels = [] for dep in deps: - if eval_type(LinkableGraph.type).matches(dep): + if isinstance(dep, LinkableGraph): labels.append(dep.label) else: dep_info = linkable_graph(dep) @@ -272,10 +315,12 @@ def linkable_graph(dep: Dependency) -> [LinkableGraph, None]: def get_link_info( node: LinkableNode, output_style: LibOutputStyle, - prefer_stripped: bool = False) -> LinkInfo: + prefer_stripped: bool = False, + prefer_optimized: bool = False) -> LinkInfo: info = _get_link_info( node.link_infos[output_style], prefer_stripped = prefer_stripped, + prefer_optimized = prefer_optimized, ) return info @@ -307,8 +352,8 @@ def get_transitive_deps( """ def find_transitive_deps(node: Label): - return link_infos[node].deps + link_infos[node].exported_deps + return link_infos[node].all_deps - all_deps = breadth_first_traversal_by(link_infos, roots, find_transitive_deps) + 
all_deps = depth_first_traversal_by(link_infos, roots, find_transitive_deps) return all_deps diff --git a/prelude/linking/lto.bzl b/prelude/linking/lto.bzl index f275d005901..fab91ec6d70 100644 --- a/prelude/linking/lto.bzl +++ b/prelude/linking/lto.bzl @@ -5,7 +5,11 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo") +load( + "@prelude//cxx:cxx_toolchain_types.bzl", + "CxxToolchainInfo", + "LinkerType", +) load("@prelude//cxx:debug.bzl", "SplitDebugMode") # Styles of LTO. @@ -50,7 +54,7 @@ def get_split_debug_lto_info(actions: AnalysisActions, cxx_toolchain: CxxToolcha # TODO: It might be nice to generalize a but more and move the darwin v. gnu # differences into toolchain settings (e.g. `split_debug_lto_flags_fmt`). - if linker_info.type == "darwin": + if linker_info.type == LinkerType("darwin"): # https://releases.llvm.org/14.0.0/tools/clang/docs/CommandGuide/clang.html#cmdoption-flto # We need to pass -object_path_lto to keep the temporary LTO object files around to use # for dSYM generation. @@ -74,7 +78,7 @@ def get_split_debug_lto_info(actions: AnalysisActions, cxx_toolchain: CxxToolcha linker_flags = linker_args, ) - if linker_info.type == "gnu": + if linker_info.type == LinkerType("gnu"): dwo_dir = actions.declare_output(name + ".dwo.d", dir = True) linker_flags = cmd_args([ diff --git a/prelude/linking/shared_libraries.bzl b/prelude/linking/shared_libraries.bzl index 7d95e534524..e4303f8667b 100644 --- a/prelude/linking/shared_libraries.bzl +++ b/prelude/linking/shared_libraries.bzl @@ -12,6 +12,20 @@ load( "LinkedObject", # @unused Used as a type ) load("@prelude//linking:strip.bzl", "strip_object") +load("@prelude//utils:expect.bzl", "expect") + +Soname = record( + # Return the SONAME if it's a string, otherwise None. + as_str = field(typing.Callable), + # Return the SONAME as a string, throwing an error if it is actually an + # artifact. + ensure_str = field(typing.Callable), + # Return `True` if the SONAME is represented as a string. + is_str = field(bool), + # The actual SONAME can be represented by a static string, or the + # contents of a file generated at build time. + _soname = field(str | Artifact), +) SharedLibrary = record( lib = field(LinkedObject), # The LinkArgs used to produce this LinkedObject. This can be useful for debugging or # for downstream rules to reproduce the shared library with some modifications (for example # android relinker will link again with an added version script argument). # TODO(cjhopman): This is currently always available. - link_args = field(list[LinkArgs] | None), + link_args = field(list[LinkArgs] | None, None), # The sonames of the shared libraries that this links against. # TODO(cjhopman): This is currently always available.
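A usage sketch for the `Soname` record via the `to_soname` helper defined just below (the artifact case assumes a file whose contents are the SONAME, e.g. one produced by `extract_soname_from_shlib` later in this file):

    static = to_soname("libfoo.so.1")
    static.is_str          # True
    static.ensure_str()    # "libfoo.so.1"

    dynamic = to_soname(soname_file)  # an Artifact generated at build time
    dynamic.as_str()       # None; resolved later, e.g. inside gen_shared_libs_action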
- shlib_deps = field(list[str] | None), - stripped_lib = field([Artifact, None]), - can_be_asset = field(bool), - for_primary_apk = field(bool), - soname = field(str), + shlib_deps = field(list[str] | None, None), + stripped_lib = field(Artifact | None, None), + can_be_asset = field(bool, False), + for_primary_apk = field(bool, False), + soname = field(Soname), label = field(Label), ) +def _ensure_str(soname: str | Artifact) -> str: + expect(type(soname) == type(""), "SONAME is not a `str`: {}", soname) + return soname + +def to_soname(soname: str | Artifact | Soname) -> Soname: + if isinstance(soname, Soname): + return soname + soname_is_str = isinstance(soname, str) + return Soname( + as_str = lambda: soname if soname_is_str else None, + ensure_str = lambda: _ensure_str(soname), + is_str = soname_is_str, + _soname = soname, + ) + +def create_shlib( + # The soname can either be a string or an artifact with the soname in + # text form. + soname: str | Artifact | Soname, + **kwargs) -> SharedLibrary: + return SharedLibrary( + soname = to_soname(soname), + **kwargs + ) + SharedLibraries = record( # A mapping of shared library SONAME (e.g. `libfoo.so.2`) to the artifact. # Since the SONAME is what the dynamic loader uses to uniquely identify # libraries, using this as the key allows easily detecting conflicts from # dependencies. - libraries = field(dict[str, SharedLibrary]), + libraries = field(list[SharedLibrary]), ) # T-set of SharedLibraries @@ -53,6 +92,27 @@ def get_strip_non_global_flags(cxx_toolchain: CxxToolchainInfo) -> list: return ["--strip-unneeded"] +def create_shlib_from_ctx( + ctx: AnalysisContext, + soname: str | Artifact | Soname, + lib: LinkedObject): + cxx_toolchain = getattr(ctx.attrs, "_cxx_toolchain", None) + return create_shlib( + lib = lib, + stripped_lib = strip_object( + ctx, + cxx_toolchain[CxxToolchainInfo], + lib.output, + cmd_args(get_strip_non_global_flags(cxx_toolchain[CxxToolchainInfo])), + ) if cxx_toolchain != None else None, + link_args = lib.link_args, + shlib_deps = None, # TODO(cjhopman): we need this figured out. + can_be_asset = getattr(ctx.attrs, "can_be_asset", False) or False, + for_primary_apk = getattr(ctx.attrs, "used_by_wrap_script", False), + label = ctx.label, + soname = soname, + ) + def create_shared_libraries( ctx: AnalysisContext, libraries: dict[str, LinkedObject]) -> SharedLibraries: @@ -60,57 +120,13 @@ def create_shared_libraries( Take a mapping of dest -> src and turn it into a mapping that will be passed around in providers. Used for both srcs, and resources. """ - cxx_toolchain = getattr(ctx.attrs, "_cxx_toolchain", None) return SharedLibraries( - libraries = {name: SharedLibrary( - lib = shlib, - stripped_lib = strip_object( - ctx, - cxx_toolchain[CxxToolchainInfo], - shlib.output, - cmd_args(get_strip_non_global_flags(cxx_toolchain[CxxToolchainInfo])), - ) if cxx_toolchain != None else None, - link_args = shlib.link_args, - shlib_deps = None, # TODO(cjhopman): we need this figured out. 
- can_be_asset = getattr(ctx.attrs, "can_be_asset", False) or False, - for_primary_apk = getattr(ctx.attrs, "used_by_wrap_script", False), - label = ctx.label, - soname = name, - ) for (name, shlib) in libraries.items()}, + libraries = [ + create_shlib_from_ctx(ctx = ctx, soname = name, lib = shlib) + for (name, shlib) in libraries.items() + ], ) -# We do a lot of merging library maps, so don't use O(n) type annotations -def _merge_lib_map( - # dict[str, SharedLibrary] - dest_mapping, - # [dict[str, SharedLibrary] - mapping_to_merge, - filter_func) -> None: - """ - Merges a mapping_to_merge into `dest_mapping`. Fails if different libraries - map to the same name. - """ - for (name, src) in mapping_to_merge.items(): - if filter_func != None and not filter_func(name, src): - continue - existing = dest_mapping.get(name) - if existing != None and existing.lib != src.lib: - error = ( - "Duplicate library {}! Conflicting mappings:\n" + - "{} from {}\n" + - "{} from {}" - ) - fail( - error.format( - name, - existing.lib, - existing.label, - src.lib, - src.label, - ), - ) - dest_mapping[name] = src - # Merge multiple SharedLibraryInfo. The value in `node` represents a set of # SharedLibraries that is provided by the target being analyzed. It's optional # because that might not always exist, e.g. a Python library can pass through @@ -131,11 +147,156 @@ def merge_shared_libraries( set = actions.tset(SharedLibrariesTSet, **kwargs) if kwargs else None return SharedLibraryInfo(set = set) -def traverse_shared_library_info( - info: SharedLibraryInfo, - filter_func = None): # -> dict[str, SharedLibrary]: - libraries = {} +def traverse_shared_library_info(info: SharedLibraryInfo): # -> list[SharedLibrary]: libraries = [] if info.set: for libs in info.set.traverse(): - _merge_lib_map(libraries, libs.libraries, filter_func) + libraries.extend(libs.libraries) return libraries + +# Helper to merge shlibs, throwing an error if more than one has the same SONAME. +def _merge_shlibs( + shared_libs: list[SharedLibrary], + resolve_soname: typing.Callable) -> dict[str, SharedLibrary]: + merged = {} + for shlib in shared_libs: + soname = resolve_soname(shlib.soname) + existing = merged.get(soname) + if existing != None and existing.lib != shlib.lib: + error = ( + "Duplicate library {}! Conflicting mappings:\n" + + "{} from {}\n" + + "{} from {}" + ) + fail( + error.format( + shlib.soname, + existing.lib, + existing.label, + shlib.lib, + shlib.label, + ), + ) + merged[soname] = shlib + return merged + +def with_unique_str_sonames( + shared_libs: list[SharedLibrary], + skip_dynamic: bool = False) -> dict[str, SharedLibrary]: + """ + Convert a list of `SharedLibrary`s to a map of unique SONAMEs to the + corresponding `SharedLibrary`. + + Will fail if the same SONAME maps to multiple `SharedLibrary`s. + """ + return _merge_shlibs( + shared_libs = [ + shlib + for shlib in shared_libs + if shlib.soname.is_str or not skip_dynamic + ], + resolve_soname = lambda s: s.ensure_str(), + ) + +def gen_shared_libs_action( + actions: AnalysisActions, + out: str, + shared_libs: list[SharedLibrary], + gen_action: typing.Callable, + dir = False): + """ + Produce an action by first resolving all SONAMEs of the given shlibs and + enforcing that each SONAME is unique. + + The provided `gen_action` callable is called with a map of unique SONAMEs + to the corresponding shlibs.
+ """ + + output = actions.declare_output(out, dir = dir) + + def func(actions, artifacts, output): + def resolve_soname(soname): + if soname.is_str: + return soname._soname + else: + return artifacts[soname._soname].read_string().strip() + + gen_action( + actions, + output, + _merge_shlibs( + shared_libs = shared_libs, + resolve_soname = resolve_soname, + ), + ) + + dynamic_sonames = [shlib.soname._soname for shlib in shared_libs if not shlib.soname.is_str] + if dynamic_sonames: + actions.dynamic_output( + dynamic = dynamic_sonames, + inputs = [], + outputs = [output.as_output()], + f = lambda ctx, artifacts, outputs: func(ctx.actions, artifacts, outputs[output]), + ) + else: + func(actions, {}, output) + + return output + +def zip_shlibs( + merged: dict[str, SharedLibrary], + vals: list[(SharedLibrary, typing.Any)]) -> list[(str, SharedLibrary, typing.Any)]: + """ + Helper to "zip" together the soname->shlib map into a list with associated + shared lib values. + + This is useful for callers of `gen_shared_libs_action` to combine the merged + shared libs, in dedup'd dict form, with some additional data. + """ + + zipped = [] + + # Walk through the shlib and val tuples + idx = 0 + for soname, shlib in merged.items(): + for idx in range(idx, len(vals)): + if vals[idx][0] == shlib: + break + zipped.append((soname, shlib, vals[idx][1])) + + return zipped + +def create_shlib_symlink_tree(actions: AnalysisActions, out: str, shared_libs: list[SharedLibrary]): + """ + Merge shared libs into a symlink tree mapping each library's SONAME to + its artifact. + """ + return gen_shared_libs_action( + actions = actions, + out = out, + shared_libs = shared_libs, + gen_action = lambda actions, output, shared_libs: actions.symlinked_dir( + output, + {name: shlib.lib.output for name, shlib in shared_libs.items()}, + ), + dir = True, + ) + +def extract_soname_from_shlib( + actions: AnalysisActions, + name: str, + shared_lib: Artifact) -> Artifact: + """ + Extract the SONAME from a shared library into a file. + """ + soname = actions.declare_output(name) + cmd = cmd_args( + "sh", + "-c", + '''set -euo pipefail; objdump -p "$1" | grep SONAME | awk '{print $2}' > "$2"''', + "", + shared_lib, + soname.as_output(), + ) + actions.run(cmd, category = "extract_soname", identifier = shared_lib.short_path) + return soname diff --git a/prelude/linking/stamp_build_info.bzl b/prelude/linking/stamp_build_info.bzl new file mode 100644 index 00000000000..a12327bfab6 --- /dev/null +++ b/prelude/linking/stamp_build_info.bzl @@ -0,0 +1,33 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree.
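As a usage sketch, `gen_shared_libs_action` can drive outputs other than the symlink tree shown above; for example (hypothetical, not part of this diff), a plain text file listing every resolved SONAME:

    def write_soname_list(actions, shared_libs):
        return gen_shared_libs_action(
            actions = actions,
            out = "sonames.txt",
            shared_libs = shared_libs,
            gen_action = lambda actions, output, shlibs: actions.write(
                output,
                sorted(shlibs.keys()),
            ),
        )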
+ +load("@prelude//:paths.bzl", "paths") +load("@prelude//cxx:cxx_context.bzl", "get_cxx_toolchain_info") + +def stamp_build_info(ctx: AnalysisContext, obj: Artifact) -> Artifact: + """ + If necessary, add fb_build_info section to binary via late-stamping + """ + if hasattr(ctx.attrs, "_build_info") and ctx.attrs._build_info: + ctx.attrs._build_info["late_stamping"] = True + build_info_json = ctx.actions.write_json(obj.short_path + "-build-info.json", ctx.attrs._build_info) + stem, ext = paths.split_extension(obj.short_path) + stamped_output = ctx.actions.declare_output(stem + "-stamped" + ext) + + ctx.actions.run( + cmd_args([ + get_cxx_toolchain_info(ctx).binary_utilities_info.objcopy, + "--add-section", + cmd_args(build_info_json, format = "fb_build_info={}"), + obj, + stamped_output.as_output(), + ]), + identifier = obj.short_path, + category = "stamp_build_info", + ) + return stamped_output + return obj diff --git a/prelude/linking/strip.bzl b/prelude/linking/strip.bzl index 6db25241349..9bcd22207db 100644 --- a/prelude/linking/strip.bzl +++ b/prelude/linking/strip.bzl @@ -6,7 +6,11 @@ # of this source tree. load("@prelude//cxx:cxx_context.bzl", "get_cxx_toolchain_info") -load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo") +load( + "@prelude//cxx:cxx_toolchain_types.bzl", + "CxxToolchainInfo", + "LinkerType", +) def _strip_debug_info(ctx: AnalysisContext, out: str, obj: Artifact) -> Artifact: """ @@ -15,7 +19,7 @@ def _strip_debug_info(ctx: AnalysisContext, out: str, obj: Artifact) -> Artifact cxx_toolchain = get_cxx_toolchain_info(ctx) strip = cxx_toolchain.binary_utilities_info.strip output = ctx.actions.declare_output("__stripped__", out) - if cxx_toolchain.linker_info.type == "gnu": + if cxx_toolchain.linker_info.type == LinkerType("gnu"): cmd = cmd_args([strip, "--strip-debug", "--strip-unneeded", "-o", output.as_output(), obj]) else: cmd = cmd_args([strip, "-S", "-o", output.as_output(), obj]) @@ -80,10 +84,13 @@ def strip_object(ctx: AnalysisContext, cxx_toolchain: CxxToolchainInfo, unstripp stripped_lib = ctx.actions.declare_output("stripped/{}".format(output_path)) # TODO(T109996375) support configuring the flags used for stripping - cmd = cmd_args() - cmd.add(strip) - cmd.add(strip_flags) - cmd.add([unstripped, "-o", stripped_lib.as_output()]) + cmd = cmd_args( + strip, + strip_flags, + unstripped, + "-o", + stripped_lib.as_output(), + ) effective_category_suffix = category_suffix if category_suffix else "shared_lib" category = "strip_{}".format(effective_category_suffix) @@ -106,7 +113,7 @@ def strip_debug_with_gnu_debuglink(ctx: AnalysisContext, name: str, obj: Artifac ctx.actions.run(cmd, category = "extract_debuginfo", identifier = name) binary_output = ctx.actions.declare_output("__stripped_objects__", name) - cmd = cmd_args([objcopy, "--strip-debug", "--add-gnu-debuglink", debuginfo_output, obj, binary_output.as_output()]) + cmd = cmd_args([objcopy, "--strip-debug", "--keep-file-symbols", "--add-gnu-debuglink", debuginfo_output, obj, binary_output.as_output()]) ctx.actions.run(cmd, category = "strip_debug", identifier = name) return binary_output, debuginfo_output diff --git a/prelude/linking/types.bzl b/prelude/linking/types.bzl new file mode 100644 index 00000000000..486318bedbc --- /dev/null +++ b/prelude/linking/types.bzl @@ -0,0 +1,16 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. 
+# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +# Ways a library can request to be linked (e.g. usually specified via a rule +# param like `preferred_linkage`). The actual link style used for a library is +# usually determined by a combination of this and the link style being exported +# via a provider. +Linkage = enum( + "any", + "static", + "shared", +) diff --git a/prelude/matlab/matlab_program.bzl b/prelude/matlab/matlab_program.bzl index 42b150e61f3..c78cb1be37d 100644 --- a/prelude/matlab/matlab_program.bzl +++ b/prelude/matlab/matlab_program.bzl @@ -14,10 +14,10 @@ def matlab_program_impl(ctx: AnalysisContext) -> list[Provider]: cmd.add( "-batch", cmd_args( - ctx.attrs.main.basename.rstrip(".m"), + ctx.attrs.main.basename.removesuffix(".m"), quote = "shell", ), ) - cmd.add("-sd", cmd_args(ctx.attrs.main).parent()) + cmd.add("-sd", cmd_args(ctx.attrs.main, parent = 1)) return [DefaultInfo(default_output = None, other_outputs = [cmd]), RunInfo(cmd)] diff --git a/prelude/native.bzl b/prelude/native.bzl index b05f61cb106..2d774327a07 100644 --- a/prelude/native.bzl +++ b/prelude/native.bzl @@ -12,13 +12,14 @@ # **all** interpreted files. load("@prelude//android:cpu_filters.bzl", "ALL_CPU_FILTERS", "CPU_FILTER_FOR_DEFAULT_PLATFORM") -load("@prelude//apple:apple_macro_layer.bzl", "apple_binary_macro_impl", "apple_bundle_macro_impl", "apple_library_macro_impl", "apple_package_macro_impl", "apple_test_macro_impl", "apple_universal_executable_macro_impl", "apple_xcuitest_macro_impl") +load("@prelude//apple:apple_macro_layer.bzl", "apple_binary_macro_impl", "apple_bundle_macro_impl", "apple_library_macro_impl", "apple_package_macro_impl", "apple_test_macro_impl", "apple_universal_executable_macro_impl", "apple_xcuitest_macro_impl", "prebuilt_apple_framework_macro_impl") load("@prelude//apple/swift:swift_toolchain_macro_layer.bzl", "swift_toolchain_macro_impl") load("@prelude//cxx:cxx_toolchain.bzl", "cxx_toolchain_inheriting_target_platform") load("@prelude//cxx:cxx_toolchain_macro_layer.bzl", "cxx_toolchain_macro_impl") load("@prelude//cxx:cxx_toolchain_types.bzl", _cxx = "cxx") load("@prelude//erlang:erlang.bzl", _erlang_application = "erlang_application", _erlang_tests = "erlang_tests") load("@prelude//python:toolchain.bzl", _python = "python") +load("@prelude//rust:link_info.bzl", "RustLinkInfo") load("@prelude//rust:rust_common.bzl", "rust_common_macro_wrapper") load("@prelude//rust:rust_library.bzl", "rust_library_macro_wrapper") load("@prelude//rust:with_workspace.bzl", "with_rust_workspace") @@ -209,6 +210,14 @@ def _android_binary_macro_stub( **kwargs ) +def _android_bundle_macro_stub( + cpu_filters = None, + **kwargs): + __rules__["android_bundle"]( + cpu_filters = _get_valid_cpu_filters(cpu_filters), + **kwargs + ) + def _android_instrumentation_apk_macro_stub( cpu_filters = None, primary_dex_patterns = [], @@ -335,6 +344,13 @@ def _apple_watchos_bundle_macro_stub(**kwargs): **kwargs ) +def _apple_macos_bundle_macro_stub(**kwargs): + apple_bundle_macro_impl( + apple_bundle_rule = _user_rules["apple_macos_bundle"], + apple_resource_bundle_rule = _user_rules["apple_resource_bundle"], + **kwargs + ) + def _apple_test_macro_stub(**kwargs): apple_test_macro_impl( apple_test_rule = __rules__["apple_test"], @@ -364,6 +380,7 @@ def _apple_library_macro_stub(**kwargs): def
_apple_package_macro_stub(**kwargs): apple_package_macro_impl( apple_package_rule = __rules__["apple_package"], + apple_ipa_package_rule = _user_rules["apple_ipa_package"], **kwargs ) @@ -381,31 +398,21 @@ def _swift_toolchain_macro_stub(**kwargs): **kwargs ) -def _cxx_toolchain_macro_stub(inherit_target_platform = False, **kwargs): - if inherit_target_platform: - rule = cxx_toolchain_inheriting_target_platform - if is_full_meta_repo(): - cache_links = kwargs.get("cache_links") - kwargs["cache_links"] = select({ - "DEFAULT": cache_links, - "ovr_config//build_mode:fbcode-build-info-mode-disable": True, - "ovr_config//build_mode:fbcode-build-info-mode-full": False, - "ovr_config//build_mode:fbcode-build-info-mode-stable": True, - }) - else: - rule = __rules__["cxx_toolchain"] +def _cxx_toolchain_macro_stub(**kwargs): + if is_full_meta_repo(): + cache_links = kwargs.get("cache_links") + kwargs["cache_links"] = select({ + "DEFAULT": cache_links, + "ovr_config//platform/execution/constraints:execution-platform-transitioned": True, + }) cxx_toolchain_macro_impl( - cxx_toolchain_rule = rule, + cxx_toolchain_rule = cxx_toolchain_inheriting_target_platform, **kwargs ) -def _cxx_toolchain_override_macro_stub(inherit_target_platform = False, **kwargs): - if inherit_target_platform: - rule = _user_rules["cxx_toolchain_override_inheriting_target_platform"] - else: - rule = _user_rules["cxx_toolchain_override"] +def _cxx_toolchain_override_macro_stub(**kwargs): cxx_toolchain_macro_impl( - cxx_toolchain_rule = rule, + cxx_toolchain_rule = _user_rules["cxx_toolchain_override"], **kwargs ) @@ -436,16 +443,24 @@ def _rust_test_macro_stub(**kwargs): rust_test = rust_common_macro_wrapper(__rules__["rust_test"]) rust_test(**kwargs) +def _prebuilt_apple_framework_macro_stub(**kwargs): + prebuilt_apple_framework_macro_impl( + prebuilt_apple_framework_rule = __rules__["prebuilt_apple_framework"], + **kwargs + ) + # TODO(cjhopman): These macro wrappers should be handled in prelude/rules.bzl+rule_impl.bzl. # Probably good if they were defined to take in the base rule that # they are wrapping and return the wrapped one. 
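One possible shape for the refactor that TODO suggests, where each stub is derived from the base rule it wraps plus its defaults (a hypothetical sketch, not part of this diff):

    def wrap_rule(base_rule, defaults):
        def stub(**kwargs):
            # Fill in any defaults the caller did not override.
            for k, v in defaults.items():
                if k not in kwargs:
                    kwargs[k] = v
            return base_rule(**kwargs)
        return stub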
__extra_rules__ = { "android_aar": _android_aar_macro_stub, "android_binary": _android_binary_macro_stub, + "android_bundle": _android_bundle_macro_stub, "android_instrumentation_apk": _android_instrumentation_apk_macro_stub, "apple_binary": _apple_binary_macro_stub, "apple_bundle": _apple_bundle_macro_stub, "apple_library": _apple_library_macro_stub, + "apple_macos_bundle": _apple_macos_bundle_macro_stub, "apple_package": _apple_package_macro_stub, "apple_test": _apple_test_macro_stub, "apple_universal_executable": _apple_universal_executable_macro_stub, @@ -457,6 +472,7 @@ __extra_rules__ = { "erlang_application": _erlang_application_macro_stub, "erlang_tests": _erlang_tests_macro_stub, "export_file": _export_file_macro_stub, + "prebuilt_apple_framework": _prebuilt_apple_framework_macro_stub, "prebuilt_cxx_library": _prebuilt_cxx_library_macro_stub, "python_library": _python_library_macro_stub, "rust_binary": _rust_binary_macro_stub, @@ -467,12 +483,17 @@ __extra_rules__ = { "versioned_alias": _versioned_alias_macro_stub, } -__shimmed_native__ = __struct_to_dict(__internal__) +__shimmed_native__ = __struct_to_dict(__buck2_builtins__) __shimmed_native__.update(__rules__) __shimmed_native__.update(_user_rules) # Should come after the rules which are macro overridden __shimmed_native__.update(__extra_rules__) __shimmed_native__.update({"cxx": _cxx, "python": _python}) +__shimmed_native__.update({ + "__internal_autodeps_hacks__": struct( + rust_link_info = RustLinkInfo, + ), +}) native = struct(**__shimmed_native__) diff --git a/prelude/ocaml/ocaml.bzl b/prelude/ocaml/ocaml.bzl index 67f0e6d4f8f..9cfe8fa2015 100644 --- a/prelude/ocaml/ocaml.bzl +++ b/prelude/ocaml/ocaml.bzl @@ -92,7 +92,7 @@ load( "@prelude//python:python.bzl", "PythonLibraryInfo", ) -load("@prelude//utils:graph_utils.bzl", "breadth_first_traversal", "post_order_traversal") +load("@prelude//utils:graph_utils.bzl", "depth_first_traversal", "post_order_traversal") load("@prelude//utils:platform_flavors_util.bzl", "by_platform") load("@prelude//utils:utils.bzl", "filter_and_map_idx", "flatten") load(":makefile.bzl", "parse_makefile") @@ -167,7 +167,7 @@ def _mk_script(ctx: AnalysisContext, file: str, args: list[typing.Any], env: dic is_executable = True, allow_args = True, ) - return cmd_args(script).hidden(args, env.values()) + return cmd_args(script, hidden = args + env.values()) # An environment in which a custom `bin` is at the head of `$PATH`. def _mk_env(ctx: AnalysisContext) -> dict[str, cmd_args]: @@ -321,8 +321,10 @@ def _preprocess(ctx: AnalysisContext, srcs: list[Artifact], build_mode: BuildMod parser_sig = ctx.actions.declare_output(name + ".mli") result.extend((parser_sig, parser)) - cmd = cmd_args([menhir, "--fixed-exception", "-b", cmd_args(prefix).ignore_artifacts(), src]) - cmd.hidden(parser.as_output(), parser_sig.as_output()) + cmd = cmd_args( + [menhir, "--fixed-exception", "-b", cmd_args(prefix, ignore_artifacts = True), src], + hidden = [parser.as_output(), parser_sig.as_output()], + ) ctx.actions.run(cmd, category = "ocaml_yacc_" + build_mode.value, identifier = src.short_path) elif ext == ".mll": @@ -353,7 +355,7 @@ def _depends(ctx: AnalysisContext, srcs: list[Artifact], build_mode: BuildMode) dep_cmdline.add([cmd_args(f, format = "\"{}\"") for f in ctx.attrs.ocamldep_flags]) # These -I's are for ocamldep. 
- dep_cmdline.add(cmd_args([cmd_args(src).parent() for src in srcs], format = "-I {}")) + dep_cmdline.add(cmd_args([cmd_args(src, parent = 1) for src in srcs], format = "-I {}")) dep_cmdline.add(srcs) dep_script_name = "ocamldep_" + build_mode.value + ".sh" dep_sh, _ = ctx.actions.write( @@ -362,7 +364,7 @@ def _depends(ctx: AnalysisContext, srcs: list[Artifact], build_mode: BuildMode) is_executable = True, allow_args = True, ) - ctx.actions.run(cmd_args(dep_sh).hidden(dep_output.as_output(), dep_cmdline), category = "ocamldep_" + build_mode.value) + ctx.actions.run(cmd_args(dep_sh, hidden = [dep_output.as_output(), dep_cmdline]), category = "ocamldep_" + build_mode.value) return dep_output # Compile all the context's sources. If bytecode compiling, 'cmxs' & 'objs' will @@ -500,7 +502,7 @@ def _compile(ctx: AnalysisContext, compiler: cmd_args, build_mode: BuildMode) -> # the dependency of 'src' on other files in 'srcs'. depends_include_paths = [] seen_dirs = {} - for d in breadth_first_traversal(makefile2, makefile2.get(src, [])): + for d in depth_first_traversal(makefile2, makefile2.get(src, [])): # 'src' depends on 'd' (e.g. src='quux.ml' depends on # d='quux.mli'). # @@ -516,7 +518,7 @@ def _compile(ctx: AnalysisContext, compiler: cmd_args, build_mode: BuildMode) -> if i != None: p = paths.dirname(i.short_path) if not p in seen_dirs: - depends_include_paths.append(cmd_args(i).parent()) + depends_include_paths.append(cmd_args(i, parent = 1)) seen_dirs[p] = None # *All* the include paths needed to compile 'src'. @@ -528,7 +530,7 @@ def _compile(ctx: AnalysisContext, compiler: cmd_args, build_mode: BuildMode) -> cmd.add(src, "-c", "-o", mk_out(cmi)) if build_mode.value == "expand": cmd.add("-dsource") - cmd.hidden(mk_out(cmti), depends_produce) + cmd.add(cmd_args(hidden = [mk_out(cmti), depends_produce])) if build_mode.value == "expand": sh = cmd_args(["/bin/sh", "-c", '"$@" 2> "$preprocessed_source_file"', "--", cmd]) @@ -540,22 +542,22 @@ def _compile(ctx: AnalysisContext, compiler: cmd_args, build_mode: BuildMode) -> elif ext == ".ml": (obj, cmo, cmx, cmt, cmi, ppml) = produces[src] cmd = _compile_cmd(ctx, compiler, build_mode, cc, all_include_paths) - cmd.hidden(depends_produce) + cmd.add(cmd_args(hidden = depends_produce)) if cmo != None: cmd.add(src, "-c", "-o", mk_out(cmo)) if cmx != None: cmd.add(src, "-c", "-o", mk_out(cmx)) - cmd.hidden(mk_out(cmt)) + cmd.add(cmd_args(hidden = mk_out(cmt))) if build_mode.value == "expand": cmd.add("-dsource") if obj != None: - cmd.hidden(mk_out(obj)) + cmd.add(cmd_args(hidden = mk_out(obj))) if cmi != None: cmd.add("-intf-suffix", ",nomli,") # ignore any .mlis that aren't explicit dependencies - cmd.hidden(mk_out(cmi)) + cmd.add(cmd_args(hidden = mk_out(cmi))) else: # An explicit '.mli' for this '.ml' is a dependency. - cmd.hidden(mlis[paths.replace_extension(src.short_path, ".mli")]) + cmd.add(cmd_args(hidden = mlis[paths.replace_extension(src.short_path, ".mli")])) if build_mode.value == "expand": sh = cmd_args(["/bin/sh", "-c", '"$@" 2> "$preprocessed_source_file"', "--", cmd]) @@ -570,7 +572,7 @@ def _compile(ctx: AnalysisContext, compiler: cmd_args, build_mode: BuildMode) -> # `ocaml_object` breaks for `-flto=...` so ensure `-fno-lto` prevails here. cmd.add(src, "-c", "-ccopt", "-fno-lto", "-ccopt", cmd_args(mk_out(stb), format = "-o \"{}\"")) - cmd.hidden(headers) # Any .h files given are dependencies. + cmd.add(cmd_args(hidden = headers)) # Any .h files given are dependencies. 
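The recurring mechanical change in these OCaml hunks is the `cmd_args` API migration: inputs formerly attached via chained `.hidden(...)` and `.parent()` mutations are now declared at construction time, in the pattern used throughout this diff:

    # before: cmd = cmd_args(script); cmd.hidden(extra_inputs)
    cmd = cmd_args(script, hidden = extra_inputs)

    # before: cmd_args(src).parent()
    include_flag = cmd_args(src, format = "-I {}", parent = 1)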
ctx.actions.run(cmd, category = "ocaml_compile_c", identifier = src.short_path) elif ext == ".h": @@ -582,7 +584,12 @@ def _compile(ctx: AnalysisContext, compiler: cmd_args, build_mode: BuildMode) -> if outputs == []: ctx.actions.write(cmxs_order, "") else: - ctx.actions.dynamic_output(dynamic = [depends_output], inputs = todo_inputs, outputs = outputs + [cmxs_order], f = f) + ctx.actions.dynamic_output( + dynamic = [depends_output], + inputs = todo_inputs, + outputs = [o.as_output() for o in outputs + [cmxs_order]], + f = f, + ) return CompileResultInfo(cmxs_order = cmxs_order, stbs = stbs, objs = objs, cmis = cmis, cmos = cmos, cmxs = cmxs, cmts = cmts, cmtis = cmtis, ppmlis = ppmlis, ppmls = ppmls) @@ -594,15 +601,17 @@ def _include_paths(cmis: list[Artifact], cmos: list[Artifact]) -> cmd_args: for f in cmis: p = paths.dirname(f.short_path) if not p in seen_dirs: - include_paths.append(cmd_args(f).parent()) + include_paths.append(cmd_args(f, parent = 1)) seen_dirs[p] = None for f in cmos: p = paths.dirname(f.short_path) if not p in seen_dirs: - include_paths.append(cmd_args(f).parent()) + include_paths.append(cmd_args(f, parent = 1)) seen_dirs[p] = None - include_paths = cmd_args(include_paths) - include_paths.hidden(cmis + cmos) + include_paths = cmd_args( + include_paths, + hidden = cmis + cmos, + ) return include_paths def ocaml_library_impl(ctx: AnalysisContext) -> list[Provider]: @@ -626,7 +635,7 @@ def ocaml_library_impl(ctx: AnalysisContext) -> list[Provider]: cmd_nat.add("-o", cmxa.as_output()) if len([s for s in ctx.attrs.srcs if s.extension == ".ml"]) != 0: native_c_lib = ctx.actions.declare_output("lib" + ctx.attrs.name + ".a") - cmd_nat.hidden(native_c_lib.as_output()) + cmd_nat.add(cmd_args(hidden = native_c_lib.as_output())) native_c_libs = [native_c_lib] else: native_c_libs = [] @@ -637,7 +646,7 @@ def ocaml_library_impl(ctx: AnalysisContext) -> list[Provider]: # These were produced by the compile step and so are hidden dependencies of # the archive step. - cmd_nat.hidden(cmxs, cmis_nat, objs, cmts_nat, cmtis_nat) + cmd_nat.add(cmd_args(hidden = [cmxs, cmis_nat, objs, cmts_nat, cmtis_nat])) ctx.actions.run(cmd_nat, category = "ocaml_archive_native") cmxs_order, stbs_byt, _objs, cmis_byt, cmos, _cmxs, cmts_byt, cmtis_byt, _ppmlis, _ppmls = _compile_result_to_tuple(_compile(ctx, ocamlc, BuildMode("bytecode"))) @@ -652,7 +661,7 @@ def ocaml_library_impl(ctx: AnalysisContext) -> list[Provider]: # These were produced by the compile step and so are hidden dependencies of # the archive step. - cmd_byt.hidden(cmos, cmis_byt, cmts_byt, cmtis_byt) + cmd_byt.add(cmd_args(hidden = [cmos, cmis_byt, cmts_byt, cmtis_byt])) ctx.actions.run(cmd_byt, category = "ocaml_archive_bytecode") infos = _attr_deps_ocaml_link_infos(ctx) @@ -731,6 +740,7 @@ def ocaml_binary_impl(ctx: AnalysisContext) -> list[Provider]: dep_link_infos = _attr_deps_merged_link_infos(ctx) + filter(None, [ocaml_toolchain.libc]) + [d.get(MergedLinkInfo) for d in ocaml_toolchain_runtime_deps] cxx_toolchain = get_cxx_toolchain_info(ctx) link_args_output = make_link_args( + ctx, ctx.actions, cxx_toolchain, [get_link_args_for_strategy(ctx, dep_link_infos, LinkStrategy("static_pic"))], @@ -756,7 +766,7 @@ def ocaml_binary_impl(ctx: AnalysisContext) -> list[Provider]: # These were produced by the compile step and are therefore hidden # dependencies of the link step. 
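Note: the `dynamic_output` hunk above reflects an API tightening: declared artifacts must now be passed to `outputs` as output artifacts via `.as_output()`. A minimal sketch under that assumption, reusing the `depends_output`/`cmxs_order` names from this file and the standard dynamic_output callback signature:

    def _emit_order(ctx, artifacts, outputs):
        # Read the materialized ocamldep result and bind the declared output.
        order = artifacts[depends_output].read_string()
        ctx.actions.write(outputs[cmxs_order], order)

    ctx.actions.dynamic_output(
        dynamic = [depends_output],
        inputs = [],
        outputs = [cmxs_order.as_output()],  # output artifacts, not plain artifacts
        f = _emit_order,
    )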
- cmd_nat.hidden(cmxs, cmis_nat, cmts_nat, cmtis_nat, objs, link_args_output.hidden) + cmd_nat.add(cmd_args(hidden = [cmxs, cmis_nat, cmts_nat, cmtis_nat, objs, link_args_output.hidden])) binary_nat = ctx.actions.declare_output(ctx.attrs.name + ".opt") cmd_nat.add([cmd_args(["-cclib", f]) for f in ocaml_toolchain.runtime_dep_link_flags]) @@ -770,7 +780,7 @@ def ocaml_binary_impl(ctx: AnalysisContext) -> list[Provider]: # These were produced by the compile step and are therefore hidden # dependencies of the link step. - cmd_byt.hidden(cmos, cmis_byt, cmts_byt, cmtis_byt, link_args_output.hidden) + cmd_byt.add(cmd_args(hidden = [cmos, cmis_byt, cmts_byt, cmtis_byt, link_args_output.hidden])) binary_byt = ctx.actions.declare_output(ctx.attrs.name) cmd_byt.add("-custom") cmd_byt.add([cmd_args(["-cclib", f]) for f in ocaml_toolchain.runtime_dep_link_flags]) @@ -823,6 +833,7 @@ def ocaml_object_impl(ctx: AnalysisContext) -> list[Provider]: dep_link_infos = _attr_deps_merged_link_infos(ctx) cxx_toolchain = get_cxx_toolchain_info(ctx) link_args_output = make_link_args( + ctx, ctx.actions, cxx_toolchain, [get_link_args_for_strategy(ctx, dep_link_infos, LinkStrategy("static_pic"))], @@ -840,10 +851,10 @@ def ocaml_object_impl(ctx: AnalysisContext) -> list[Provider]: for lib in merge_ocaml_link_infos(_attr_deps_ocaml_link_infos(ctx)).info: cmd.add(lib.cmxas, lib.c_libs, lib.native_c_libs, lib.stbs_nat) - cmd.hidden(lib.cmxs, lib.cmis_nat, lib.cmts_nat) + cmd.add(cmd_args(hidden = [lib.cmxs, lib.cmis_nat, lib.cmts_nat])) cmd.add(stbs, "-args", cmxs_order) - cmd.hidden(cmxs, cmis, cmts, objs, cmtis, link_args_output.hidden) + cmd.add(cmd_args(hidden = [cmxs, cmis, cmts, objs, cmtis, link_args_output.hidden])) obj = ctx.actions.declare_output(ctx.attrs.name + ".o") cmd.add("-output-complete-obj") @@ -922,6 +933,7 @@ def ocaml_shared_impl(ctx: AnalysisContext) -> list[Provider]: dep_link_infos = _attr_deps_merged_link_infos(ctx) + filter(None, [ocaml_toolchain.libc]) cxx_toolchain = get_cxx_toolchain_info(ctx) link_args_output = make_link_args( + ctx, ctx.actions, cxx_toolchain, [get_link_args_for_strategy(ctx, dep_link_infos, LinkStrategy("static_pic"))], @@ -946,7 +958,7 @@ def ocaml_shared_impl(ctx: AnalysisContext) -> list[Provider]: # These were produced by the compile step and are therefore hidden # dependencies of the link step. 
- cmd_nat.hidden(cmxs, cmis_nat, cmts_nat, cmtis_nat, objs, link_args_output.hidden) + cmd_nat.add(cmd_args(hidden = [cmxs, cmis_nat, cmts_nat, cmtis_nat, objs, link_args_output.hidden])) binary_nat = ctx.actions.declare_output(ctx.attrs.name + ".cmxs") cmd_nat.add("-shared") cmd_nat.add("-o", binary_nat.as_output()) diff --git a/prelude/os/BUCK.v2 b/prelude/os/BUCK.v2 index 816fd176473..04721a42b79 100644 --- a/prelude/os/BUCK.v2 +++ b/prelude/os/BUCK.v2 @@ -1,3 +1,11 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + +prelude = native # Avoid warnings and auto-formatters + # The short list of ubiquitous, mainstream operating systems: config_setting( @@ -34,12 +42,12 @@ config_setting( visibility = ["PUBLIC"], ) -native.constraint_setting( +prelude.constraint_setting( name = "maybe_building_android_binary", visibility = ["prelude//..."], ) -native.constraint_value( +prelude.constraint_value( name = "building_android_binary", constraint_setting = ":maybe_building_android_binary", visibility = ["prelude//..."], @@ -114,6 +122,23 @@ config_setting( visibility = ["PUBLIC"], ) +config_setting( + name = "linux-sgx", + constraint_values = [ + "//os/constraints:linux", + ], + visibility = ["PUBLIC"], +) + +config_setting( + name = "linux-arm64", + constraint_values = [ + "//cpu/constraints:arm64", + "//os/constraints:linux", + ], + visibility = ["PUBLIC"], +) + # For platforms with no OS, like microcontrollers. config_setting( name = "none", diff --git a/prelude/os/constraints/BUCK.v2 b/prelude/os/constraints/BUCK.v2 index cdb63a7a8b8..0426226eb08 100644 --- a/prelude/os/constraints/BUCK.v2 +++ b/prelude/os/constraints/BUCK.v2 @@ -1,3 +1,9 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + # Used by open source projects to support `prelude//` constraint_setting( diff --git a/prelude/os_lookup/targets/BUCK.v2 b/prelude/os_lookup/targets/BUCK.v2 index 9919027f6a3..e6ecd06544d 100644 --- a/prelude/os_lookup/targets/BUCK.v2 +++ b/prelude/os_lookup/targets/BUCK.v2 @@ -1,4 +1,9 @@ -load("//os_lookup:defs.bzl", "os_lookup") +load("@prelude//os_lookup:defs.bzl", "os_lookup") +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() os_lookup( name = "os_lookup", diff --git a/prelude/platforms/BUCK b/prelude/platforms/BUCK deleted file mode 100644 index 12eab655917..00000000000 --- a/prelude/platforms/BUCK +++ /dev/null @@ -1,25 +0,0 @@ -# Used by open source projects to provide a simple platform setting -# This file exports a sub-set of the definitions from TARGETS.v2 for backwards-compatibility with buck1. -# NOTE: These have no effect in BUCK1 and are only provided so imports can resolve. 
- -oncall("build_infra") - -config_setting( - name = "runs_remote", - visibility = ["PUBLIC"], -) - -config_setting( - name = "runs_local", - visibility = ["PUBLIC"], -) - -config_setting( - name = "runs_only_local", - visibility = ["PUBLIC"], -) - -config_setting( - name = "runs_only_remote", - visibility = ["PUBLIC"], -) diff --git a/prelude/platforms/BUCK.v2 b/prelude/platforms/BUCK.v2 index d10b161f8c2..30302244bda 100644 --- a/prelude/platforms/BUCK.v2 +++ b/prelude/platforms/BUCK.v2 @@ -1,7 +1,12 @@ # Used by open source projects to provide a simple platform setting +load("@prelude//utils:source_listing.bzl", "source_listing") load(":defs.bzl", "execution_platform", "host_configuration") +oncall("build_infra") + +source_listing() + prelude = native execution_platform( @@ -68,8 +73,8 @@ prelude.constraint_value( # execution configuration, but that's not implemented yet. export_file( name = "fat_platform_incompatible", + src = "BUCK.v2", # @oss-enable # @oss-disable: src = "TARGETS.v2", - src = "BUCK", # @oss-enable target_compatible_with = select({ ":fat_platform_enabled": ["config//:none"], "DEFAULT": [], diff --git a/prelude/platforms/apple/arch.bzl b/prelude/platforms/apple/arch.bzl index f6258b04b61..72163c98b81 100644 --- a/prelude/platforms/apple/arch.bzl +++ b/prelude/platforms/apple/arch.bzl @@ -5,6 +5,10 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -AppleArches = ["i386", "x86_64", "arm64", "arm64_32", "armv7k"] +_APPLE_ARCHES = [ + "arm64", + "arm64_32", + "x86_64", +] -AppleArch = enum(*AppleArches) +AppleArch = enum(*_APPLE_ARCHES) diff --git a/prelude/platforms/apple/base.bzl b/prelude/platforms/apple/base.bzl new file mode 100644 index 00000000000..90a87848abb --- /dev/null +++ b/prelude/platforms/apple/base.bzl @@ -0,0 +1,97 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load("@prelude//:is_buck2.bzl", "is_buck2") # @oss-enable +load( + "@prelude//platforms/apple:build_mode.bzl", + "APPLE_BUILD_MODES", + "CONSTRAINT_PACKAGE", + "get_build_mode", + "get_build_mode_debug", +) +load( + "@prelude//platforms/apple:constants.bzl", + "ios_platforms", + "mac_catalyst_platforms", + "mac_platforms", + "watch_platforms", +) + +# Debug constraints to add for build modes used by other rule platforms (ex: rust). +_DEBUG_CONSTRAINTS = [ + # @oss-disable: "ovr_config//build_mode/constraints:debug", +] + +# Release constraints to add for build modes used by other rule platforms (ex: rust). 
+_RELEASE_CONSTRAINTS = [ + # @oss-disable: "ovr_config//build_mode/constraints:release", +] + +BUILD_MODE_TO_CONSTRAINTS_MAP = { + build_mode: ["{}:{}".format(CONSTRAINT_PACKAGE, build_mode)] + (_DEBUG_CONSTRAINTS if build_mode == get_build_mode_debug() else _RELEASE_CONSTRAINTS) + for build_mode in APPLE_BUILD_MODES +} + +_MOBILE_PLATFORMS = [ + ios_platforms.IPHONEOS_ARM64, + ios_platforms.IPHONESIMULATOR_ARM64, + ios_platforms.IPHONESIMULATOR_X86_64, + watch_platforms.WATCHOS_ARM64, + watch_platforms.WATCHOS_ARM64_32, + watch_platforms.WATCHSIMULATOR_ARM64, + watch_platforms.WATCHSIMULATOR_X86_64, +] + +_MAC_PLATFORMS = [ + mac_platforms.MACOS_ARM64, + mac_platforms.MACOS_X86_64, + mac_platforms.MACOS_UNIVERSAL, + mac_catalyst_platforms.MACCATALYST_ARM64, + mac_catalyst_platforms.MACCATALYST_X86_64, +] + +# TODO: Drop the platform_rule when we're no longer attempting to support buck1. +def apple_generated_platforms(name, constraint_values, deps, platform_rule, platform = None): + # By convention, the cxx.default_platform is typically the same as the platform being defined. + # This is not the case for all watch platforms, so provide an override. + platform = platform if platform else name + if is_mobile_platform(platform) or is_buck2_mac_platform(platform): + for build_mode in APPLE_BUILD_MODES: + platform_rule( + name = _get_generated_name(name, platform, build_mode), + constraint_values = constraint_values + BUILD_MODE_TO_CONSTRAINTS_MAP.get(build_mode), + visibility = ["PUBLIC"], + deps = deps, + ) + + # Create a platform without the build mode to support backwards compatibility with hardcoded platforms + # and with the buck1 cxx platform setup. + # TODO(chatatap): Look to remove all hardcoded references and get rid of these + platform_rule( + name = name, + constraint_values = constraint_values, + visibility = ["PUBLIC"], + deps = deps, + ) + +def apple_build_mode_backed_platform(name, platform, build_mode = None): + build_mode = get_build_mode() if build_mode == None else build_mode + return _get_generated_name(name, platform, build_mode) + +def is_mobile_platform(platform): + # These build modes are primarily used in mobile code. MacOS builds in fbcode/arvr use different + # modes to represent dev/opt variants. + return platform in _MOBILE_PLATFORMS + +def is_buck2_mac_platform(platform): + return platform in _MAC_PLATFORMS + +def _get_generated_name(name, platform, build_mode): + if is_mobile_platform(platform) or is_buck2_mac_platform(platform): + return "{}-{}".format(name, build_mode) + else: + return name diff --git a/prelude/platforms/apple/build_mode.bzl b/prelude/platforms/apple/build_mode.bzl new file mode 100644 index 00000000000..7200555022a --- /dev/null +++ b/prelude/platforms/apple/build_mode.bzl @@ -0,0 +1,37 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree.
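Note: a worked example of the `BUILD_MODE_TO_CONSTRAINTS_MAP` comprehension above, assuming the OSS `CONSTRAINT_PACKAGE` defined in build_mode.bzl below and the (empty in OSS) debug/release constraint lists:

    # BUILD_MODE_TO_CONSTRAINTS_MAP["debug"] ==
    #     ["prelude//platforms/apple/constraints:debug"] + _DEBUG_CONSTRAINTS
    # BUILD_MODE_TO_CONSTRAINTS_MAP["release"] ==
    #     ["prelude//platforms/apple/constraints:release"] + _RELEASE_CONSTRAINTS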
+ +# @oss-disable: load("@prelude//platforms/apple/meta_only:build_mode.bzl", _APPLE_BUILD_MODES = "APPLE_BUILD_MODES", _BUILD_MODE = "BUILD_MODE", _get_build_mode = "get_build_mode", _get_build_mode_debug = "get_build_mode_debug", _get_build_mode_release = "get_build_mode_release") + +BUILD_MODE_DEBUG = "debug" # @oss-enable +BUILD_MODE_PROFILE = "profile" # @oss-enable +BUILD_MODE_RELEASE = "release" # @oss-enable + +APPLE_BUILD_MODES = [BUILD_MODE_DEBUG, BUILD_MODE_PROFILE, BUILD_MODE_RELEASE] # @oss-enable +# @oss-disable: APPLE_BUILD_MODES = _APPLE_BUILD_MODES + +BUILD_MODE = struct( # @oss-enable + DEBUG = BUILD_MODE_DEBUG, # @oss-enable + PROFILE = BUILD_MODE_PROFILE, # @oss-enable + RELEASE = BUILD_MODE_RELEASE, # @oss-enable +) # @oss-enable +# @oss-disable: BUILD_MODE = _BUILD_MODE + +CONSTRAINT_PACKAGE = "prelude//platforms/apple/constraints" # @oss-enable +# @oss-disable: CONSTRAINT_PACKAGE = "ovr_config//build_mode/apple/constraints" + +def get_build_mode(): + return read_root_config("apple", "build_mode", BUILD_MODE_DEBUG) # @oss-enable + # @oss-disable: return _get_build_mode() + +def get_build_mode_debug(): + return BUILD_MODE.DEBUG # @oss-enable + # @oss-disable: return _get_build_mode_debug() + +def get_build_mode_release(): + return BUILD_MODE.RELEASE # @oss-enable + # @oss-disable: return _get_build_mode_release() diff --git a/prelude/platforms/apple/constants.bzl b/prelude/platforms/apple/constants.bzl new file mode 100644 index 00000000000..a5dcbe3fc90 --- /dev/null +++ b/prelude/platforms/apple/constants.bzl @@ -0,0 +1,109 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +# These are identifiers used in defining Apple platforms for configuring apple_* rules. 
+ +APPLE = "Apple" + +# Apple SDK Definitions +APPLETVOS = "appletvos" + +IOS = "ios" + +MACOSX = "macosx" + +WATCHOS = "watchos" + +VISIONOS = "visionos" + +# Apple TV Platforms/Flavors + +APPLETVOS_ARM64 = "appletvos-arm64" + +APPLETVSIMULATOR_ARM64 = "appletvsimulator-arm64" + +APPLETVSIMULATOR_X86_64 = "appletvsimulator-x86_64" + +# iOS Platforms/Flavors + +IPHONEOS_ARM64 = "iphoneos-arm64" + +IPHONESIMULATOR_ARM64 = "iphonesimulator-arm64" + +IPHONESIMULATOR_X86_64 = "iphonesimulator-x86_64" + +# Mac Catalyst Platforms/Flavors + +MACCATALYST_ARM64 = "maccatalyst-arm64" + +MACCATALYST_X86_64 = "maccatalyst-x86_64" + +# Mac OS X Platforms/Flavors + +MACOS_ARM64 = "macosx-arm64" + +MACOS_X86_64 = "macosx-x86_64" + +MACOS_UNIVERSAL = "macosx-universal" + +# Watch OS Platforms/Flavors + +WATCHOS_ARM64 = "watchos-arm64" + +WATCHOS_ARM64_32 = "watchos-arm64_32" + +WATCHSIMULATOR_ARM64 = "watchsimulator-arm64" + +WATCHSIMULATOR_X86_64 = "watchsimulator-x86_64" + +# Vision OS Platforms/Flavors +VISIONOS_ARM64 = "visionos-arm64" + +VISIONSIMULATOR_ARM64 = "visionsimulator-arm64" + +apple_sdks = struct( + IOS = IOS, + WATCHOS = WATCHOS, + MACOSX = MACOSX, + APPLETVOS = APPLETVOS, + VISIONOS = VISIONOS, +) + +appletv_platforms = struct( + APPLETVOS_ARM64 = APPLETVOS_ARM64, + APPLETVSIMULATOR_ARM64 = APPLETVSIMULATOR_ARM64, + APPLETVSIMULATOR_X86_64 = APPLETVSIMULATOR_X86_64, +) + +ios_platforms = struct( + IPHONEOS_ARM64 = IPHONEOS_ARM64, + IPHONESIMULATOR_ARM64 = IPHONESIMULATOR_ARM64, + IPHONESIMULATOR_X86_64 = IPHONESIMULATOR_X86_64, +) + +mac_catalyst_platforms = struct( + MACCATALYST_ARM64 = MACCATALYST_ARM64, + MACCATALYST_X86_64 = MACCATALYST_X86_64, +) + +mac_platforms = struct( + MACOS_ARM64 = MACOS_ARM64, + MACOS_X86_64 = MACOS_X86_64, + MACOS_UNIVERSAL = MACOS_UNIVERSAL, +) + +watch_platforms = struct( + WATCHOS_ARM64 = WATCHOS_ARM64, + WATCHOS_ARM64_32 = WATCHOS_ARM64_32, + WATCHSIMULATOR_ARM64 = WATCHSIMULATOR_ARM64, + WATCHSIMULATOR_X86_64 = WATCHSIMULATOR_X86_64, +) + +vision_platforms = struct( + VISIONOS_ARM64 = VISIONOS_ARM64, + VISIONSIMULATOR_ARM64 = VISIONSIMULATOR_ARM64, +) diff --git a/prelude/platforms/apple/platforms.bzl b/prelude/platforms/apple/platforms.bzl new file mode 100644 index 00000000000..8f3fc038225 --- /dev/null +++ b/prelude/platforms/apple/platforms.bzl @@ -0,0 +1,241 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
+ +load("@prelude//apple:apple_platforms.bzl", "APPLE_PLATFORMS_KEY") +load("@prelude//platforms/apple:base.bzl", "BUILD_MODE_TO_CONSTRAINTS_MAP", "apple_build_mode_backed_platform", "is_buck2_mac_platform", "is_mobile_platform") +load( + "@prelude//platforms/apple:build_mode.bzl", + "APPLE_BUILD_MODES", + "get_build_mode", + "get_build_mode_debug", +) +load( + "@prelude//platforms/apple:constants.bzl", + "ios_platforms", + "mac_catalyst_platforms", + "mac_platforms", +) +load("@prelude//platforms/apple:platforms_map.bzl", "APPLE_PLATFORMS_MAP") +load("@prelude//utils:buckconfig.bzl", "read") + +_SUPPORTED_IOS_PLATFORMS = [ + ios_platforms.IPHONEOS_ARM64, + ios_platforms.IPHONESIMULATOR_ARM64, + ios_platforms.IPHONESIMULATOR_X86_64, +] + +_SUPPORTED_MACOS_PLATFORMS = [ + mac_platforms.MACOS_ARM64, + mac_platforms.MACOS_X86_64, +] + +_SUPPORTED_MAC_CATALYST_PLATFORMS = [ + mac_catalyst_platforms.MACCATALYST_ARM64, + mac_catalyst_platforms.MACCATALYST_X86_64, +] + +_ANALYSIS_CONSTRAINTS = ["ovr_config//bitcode/constraints:bitcode"] +_DEFAULT_ANALYSIS_IOS_PLATFORM = ios_platforms.IPHONEOS_ARM64 +_DEFAULT_ANALYSIS_MACOS_PLATFORM = mac_platforms.MACOS_X86_64 + +DEFAULT_SUPPORTED_CXX_PLATFORMS = _SUPPORTED_IOS_PLATFORMS + +def apple_target_platforms( + base_name, + platform_rule, + constraint_values = None, # Constraint values added to all generated platforms + visibility = None, + deps = None, + cxx_platforms_constraint_values = None, # Must be a map of a supported cxx platform to a list of constraint values + build_mode_constraint_values = None, # Must be a map of a supported build mode to a list of constraint values + supported_cxx_platforms = DEFAULT_SUPPORTED_CXX_PLATFORMS, # Cxx platforms to generate platforms for + supported_build_modes = APPLE_BUILD_MODES): # Build modes to generate platforms for + """ Define architecture and sdk specific platforms alongside the base platform. """ + + # HACK: Apps shouldn't be generating platforms for cxx_platforms they don't support. However, to support cases where other apps + # depend on shared libraries that don't generate particular platforms, and set a cxx.default_platform on the command line, we need + # to make the graph parseable and generate the missing target platforms. They will never be used, but need to exist in the config + # backed world. 
+ config_based_platform = read("cxx", "default_platform") + if config_based_platform != None and config_based_platform not in supported_cxx_platforms: + supported_cxx_platforms = list(supported_cxx_platforms) + if config_based_platform in _SUPPORTED_MACOS_PLATFORMS: + for p in _SUPPORTED_MACOS_PLATFORMS: + if p not in supported_cxx_platforms: + supported_cxx_platforms.append(p) + + if config_based_platform in _SUPPORTED_MAC_CATALYST_PLATFORMS: + for p in _SUPPORTED_MAC_CATALYST_PLATFORMS: + if p not in supported_cxx_platforms: + supported_cxx_platforms.append(p) + + # Form defaults + constraint_values = constraint_values or [] + cxx_platforms_constraint_values = cxx_platforms_constraint_values or {} + build_mode_constraint_values = build_mode_constraint_values or {} + visibility = visibility or ["PUBLIC"] + deps = deps or [] + + _validate_cxx_platforms_constraint_values(base_name, cxx_platforms_constraint_values, supported_cxx_platforms) + _validate_build_mode_constraint_values(base_name, build_mode_constraint_values, supported_build_modes) + + # Define the generated platforms + for platform in supported_cxx_platforms: + platform_dep = get_default_target_platform_for_platform(platform) + cxx_platform_constraints = cxx_platforms_constraint_values.get(platform, []) + if is_mobile_platform(platform) or is_buck2_mac_platform(platform): + for build_mode in supported_build_modes: + build_mode_constraints = build_mode_constraint_values.get(build_mode, []) + BUILD_MODE_TO_CONSTRAINTS_MAP.get(build_mode) + _define_platform( + base_name, + platform, + build_mode, + constraint_values + cxx_platform_constraints + build_mode_constraints, + visibility, + deps + [platform_dep], + platform_rule, + ) + else: + _define_platform( + base_name, + platform, + None, + constraint_values + cxx_platform_constraints, + visibility, + deps + [platform_dep], + platform_rule, + ) + + # Define the base platform in case it is needed (example: to be a dep of another platform) + platform_rule( + name = base_name, + constraint_values = constraint_values, + visibility = visibility, + deps = deps, + ) + + analysis_platform = _get_analysis_platform_for_supported_platforms(supported_cxx_platforms) + analysis_platform_dep = get_default_target_platform_for_platform(analysis_platform) + analysis_platform_build_mode_constraints = build_mode_constraint_values.get(get_build_mode_debug(), []) + + platform_rule( + name = base_name + "-analysis", + constraint_values = constraint_values + analysis_platform_build_mode_constraints + _ANALYSIS_CONSTRAINTS, + visibility = ["PUBLIC"], + deps = deps + [analysis_platform_dep], + ) + +def config_backed_apple_target_platform(target_platform = None, platform = None, build_mode = None): + platform = _get_default_platform() if platform == None else platform + build_mode = get_build_mode() if build_mode == None else build_mode + if target_platform == None: + return get_default_target_platform_for_platform(platform) + + return _get_generated_name(target_platform, platform, build_mode) + +def get_default_target_platform_for_platform(sdk_arch) -> [str, None]: + data = APPLE_PLATFORMS_MAP.get(sdk_arch) + if data != None: + return data.target_platform + + return None + +def set_apple_platforms(platform, base_config_backed_target_platform, kwargs): + def get_supported_platforms(): + if platform in _SUPPORTED_IOS_PLATFORMS: + return _SUPPORTED_IOS_PLATFORMS + elif platform in _SUPPORTED_MACOS_PLATFORMS: + return _SUPPORTED_MACOS_PLATFORMS + elif platform in _SUPPORTED_MAC_CATALYST_PLATFORMS: + return 
_SUPPORTED_MAC_CATALYST_PLATFORMS + else: + return None + + supported_platforms = get_supported_platforms() + if not supported_platforms: + return kwargs + + # If we've already defined the apple platforms, we can avoid having to process them again. + if APPLE_PLATFORMS_KEY in kwargs: + return kwargs + + apple_platforms = {} + for platform in supported_platforms: + for build_mode in APPLE_BUILD_MODES: + identifier = "{}-{}".format(platform, build_mode) + if base_config_backed_target_platform: + apple_platforms[identifier] = config_backed_apple_target_platform(base_config_backed_target_platform, platform, build_mode) + else: + base_target_platform = _get_base_target_platform_for_platform(platform) + if not base_target_platform: + fail("A valid base target platform is required!") + apple_platforms[identifier] = apple_build_mode_backed_platform(base_target_platform, platform, build_mode) + + kwargs[APPLE_PLATFORMS_KEY] = apple_platforms + + return kwargs + +def _get_generated_name(base_name, platform, build_mode): + platform_and_build_mode_name = apple_build_mode_backed_platform(platform, platform, build_mode) + return "{}-{}".format(base_name, platform_and_build_mode_name) + +def _get_default_platform(): + platform = read("cxx", "default_platform") + return platform if platform != None else ios_platforms.IPHONESIMULATOR_X86_64 + +def _define_platform(base_name, platform, build_mode, constraint_values, visibility, deps, platform_rule): + # @lint-ignore BUCKLINT - We set the visibility to PUBLIC directly and can bypass fb_native + platform_rule( + name = _get_generated_name(base_name, platform, build_mode), + constraint_values = constraint_values, + visibility = visibility, + deps = deps, + ) + +def _get_base_target_platform_for_platform(sdk_arch) -> [str, None]: + data = APPLE_PLATFORMS_MAP.get(sdk_arch) + if data != None: + return data.base_target_platform + + return None + +def _get_analysis_platform_for_supported_platforms(supported_cxx_platforms): + # For determining the platform deps to use for the base platform, we inspect the supported + # cxx platforms, giving precedence to iOS platforms. 
+ for platform in _SUPPORTED_IOS_PLATFORMS: + if platform in supported_cxx_platforms: + return _DEFAULT_ANALYSIS_IOS_PLATFORM + + for platform in _SUPPORTED_MACOS_PLATFORMS: + if platform in supported_cxx_platforms: + return _DEFAULT_ANALYSIS_MACOS_PLATFORM + + return _DEFAULT_ANALYSIS_IOS_PLATFORM + +def _validate_cxx_platforms_constraint_values(base_name, cxx_platforms_constraint_values, supported_cxx_platforms): + if type(cxx_platforms_constraint_values) != type({}): + fail("cxx_platforms_constraint_values must be a map of platform to constraint values!") + for platform, platform_values in cxx_platforms_constraint_values.items(): + if platform not in supported_cxx_platforms: + fail("\n\nProviding platform constraints for an unsupported platform!\nBase platform: {}\nCXX Platform: {} with values {}\nSupported platforms: {}\n".format( + base_name, + platform, + platform_values, + ", ".join(supported_cxx_platforms), + )) + +def _validate_build_mode_constraint_values(base_name, build_mode_constraint_values, supported_build_modes): + if type(build_mode_constraint_values) != type({}): + fail("build_mode_constraint_values must be a map of build mode to constraint values!") + for build_mode, build_mode_values in build_mode_constraint_values.items(): + if build_mode not in supported_build_modes: + fail("\n\nProviding build mode constraints for an unsupported build mode!\nBase platform: {}\nBuild mode: {} with values {}\nSupported build modes: {}\n".format( + base_name, + build_mode, + build_mode_values, + ", ".join(supported_build_modes), + )) diff --git a/prelude/platforms/apple/platforms_map.bzl b/prelude/platforms/apple/platforms_map.bzl new file mode 100644 index 00000000000..9e15a662cd3 --- /dev/null +++ b/prelude/platforms/apple/platforms_map.bzl @@ -0,0 +1,14 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +# @oss-disable: load("@prelude//platforms/apple/meta_only:platforms_map.bzl", _APPLE_PLATFORMS_MAP = "APPLE_PLATFORMS_MAP", _APPLE_SDK_DEFAULT_PLATFORM_MAP = "APPLE_SDK_DEFAULT_PLATFORM_MAP") + +APPLE_PLATFORMS_MAP = {} # TODO: Define OSS platforms map # @oss-enable +# @oss-disable: APPLE_PLATFORMS_MAP = _APPLE_PLATFORMS_MAP + +APPLE_SDK_DEFAULT_PLATFORM_MAP = {} # @oss-enable +# @oss-disable: APPLE_SDK_DEFAULT_PLATFORM_MAP = _APPLE_SDK_DEFAULT_PLATFORM_MAP diff --git a/prelude/platforms/apple/sdk.bzl b/prelude/platforms/apple/sdk.bzl index 857896304bc..6ea7e1f0cb2 100644 --- a/prelude/platforms/apple/sdk.bzl +++ b/prelude/platforms/apple/sdk.bzl @@ -5,6 +5,18 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -AppleSdks = ["iphoneos", "iphonesimulator", "maccatalyst", "macosx", "visionos", "visionsimulator", "watchos", "watchsimulator"] +_APPLE_SDKS = [ + "iphoneos", + "iphonesimulator", + "maccatalyst", + "macosx", + "visionos", + "visionsimulator", + "watchos", + "watchsimulator", + # Marker entry used to help toolchain selectors define a set of + # tools outside the apple_toolchain definition. 
+ "toolchain-tool", +] -AppleSdk = enum(*AppleSdks) +AppleSdk = enum(*_APPLE_SDKS) diff --git a/prelude/python/compile.bzl b/prelude/python/compile.bzl index 4d6175ac2b2..ba29eb85f0d 100644 --- a/prelude/python/compile.bzl +++ b/prelude/python/compile.bzl @@ -29,21 +29,46 @@ def compile_manifests_for_mode( invalidation_mode: PycInvalidationMode = PycInvalidationMode("UNCHECKED_HASH")) -> ManifestInfo: output = ctx.actions.declare_output("bytecode_{}".format(invalidation_mode.value), dir = True) bytecode_manifest = ctx.actions.declare_output("bytecode_{}.manifest".format(invalidation_mode.value)) - cmd = cmd_args(ctx.attrs._python_toolchain[PythonToolchainInfo].host_interpreter) - cmd.add(ctx.attrs._python_toolchain[PythonToolchainInfo].compile) - cmd.add(cmd_args(output.as_output(), format = "--output={}")) - cmd.add(cmd_args(bytecode_manifest.as_output(), format = "--bytecode-manifest={}")) - cmd.add("--invalidation-mode={}".format(invalidation_mode.value)) + cmd = [ + ctx.attrs._python_toolchain[PythonToolchainInfo].host_interpreter, + ctx.attrs._python_toolchain[PythonToolchainInfo].compile, + cmd_args(output.as_output(), format = "--output={}"), + cmd_args(bytecode_manifest.as_output(), format = "--bytecode-manifest={}"), + "--invalidation-mode={}".format(invalidation_mode.value), + ] - for manifest in manifests: - cmd.add(manifest.manifest) - cmd.hidden([a for a, _ in manifest.artifacts]) - ctx.actions.run( - cmd, + env = { # On some platforms (e.g. linux), python hash code randomness can cause # the bytecode to be non-deterministic, so pin via the `PYTHONHASHSEED` # env var. - env = {"PYTHONHASHSEED": "7"}, + "PYTHONHASHSEED": "7", + } + + # support invalidating cached pyc compile actions by bumping the env var. + # the value itself is meaningless, just the fact it changes is meaningful. + # using the PYC magic number for *convenience* only + version = ctx.attrs._python_toolchain[PythonToolchainInfo].version + if version and "cinder" in version: + # for Cinder, this is a workaround... + # this action *should* use the bundled (in-repo) runtime for compilation + # (and then the change in the Cinder codebase would be sufficient to invalidate caches) + # currently though, the action uses the platform Cinder for PYC compilation, + # and these are deployed in-place (no change to toolchain paths), + # so we need to force cache invalidation when needed (e.g. 
for S411091) + env["CINDER_DUMMY_PYC_CACHE_BUSTER"] = "3451" + elif version and "3.12" in version: + # for CPython, the magic number *shouldn't* change during the lifetime of a feature release + # but internally we do make more significant changes (rarely), + # so for those cases we support forced invalidation using this env var + env["PYTHON312_DUMMY_PYC_CACHE_BUSTER"] = "3532" + + hidden = [] + for manifest in manifests: + cmd.append(manifest.manifest) + hidden.extend([a for a, _ in manifest.artifacts]) + ctx.actions.run( + cmd_args(cmd, hidden = hidden), + env = env, category = "py_compile", identifier = invalidation_mode.value, ) diff --git a/prelude/python/cxx_python_extension.bzl b/prelude/python/cxx_python_extension.bzl index 618d0437402..f583371408d 100644 --- a/prelude/python/cxx_python_extension.bzl +++ b/prelude/python/cxx_python_extension.bzl @@ -18,7 +18,6 @@ load( "@prelude//cxx:cxx_sources.bzl", "get_srcs_with_flags", ) -load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxPlatformInfo") load( "@prelude//cxx:cxx_types.bzl", "CxxRuleConstructorParams", @@ -41,7 +40,6 @@ load( "LibOutputStyle", "LinkInfo", "LinkInfos", - "Linkage", "create_merged_link_info", "wrap_link_infos", ) @@ -60,8 +58,10 @@ load( "@prelude//linking:shared_libraries.bzl", "merge_shared_libraries", ) +load("@prelude//linking:types.bzl", "Linkage") load("@prelude//os_lookup:defs.bzl", "OsLookup") -load("@prelude//python:toolchain.bzl", "PythonPlatformInfo", "get_platform_attr") +load("@prelude//python:toolchain.bzl", "PythonPlatformInfo", "PythonToolchainInfo", "get_platform_attr") +load("@prelude//unix:providers.bzl", "UnixEnv", "create_unix_env_info") load("@prelude//utils:expect.bzl", "expect") load("@prelude//utils:utils.bzl", "value_or") load(":manifest.bzl", "create_manifest_for_source_map") @@ -99,6 +99,7 @@ def cxx_python_extension_impl(ctx: AnalysisContext) -> list[Provider]: compilation_database = True, default = False, # We need to do some postprocessing to make sure the shared library is our default output java_packaging_info = False, + java_global_code_info = False, linkable_graph = False, # We create this here so we can correctly apply exclusions link_style_outputs = False, merged_native_link_info = False, @@ -110,6 +111,8 @@ def cxx_python_extension_impl(ctx: AnalysisContext) -> list[Provider]: preprocessor_for_tests = False, ) + python_toolchain = ctx.attrs._python_toolchain[PythonToolchainInfo] + impl_params = CxxRuleConstructorParams( build_empty_so = True, rule_type = "cxx_python_extension", @@ -119,6 +122,15 @@ def cxx_python_extension_impl(ctx: AnalysisContext) -> list[Provider]: use_soname = False, generate_providers = cxx_providers, generate_sub_targets = sub_targets, + compiler_flags = ctx.attrs.compiler_flags, + lang_compiler_flags = ctx.attrs.lang_compiler_flags, + platform_compiler_flags = ctx.attrs.platform_compiler_flags, + extra_link_flags = python_toolchain.extension_linker_flags, + lang_platform_compiler_flags = ctx.attrs.lang_platform_compiler_flags, + preprocessor_flags = ctx.attrs.preprocessor_flags, + lang_preprocessor_flags = ctx.attrs.lang_preprocessor_flags, + platform_preprocessor_flags = ctx.attrs.platform_preprocessor_flags, + lang_platform_preprocessor_flags = ctx.attrs.lang_platform_preprocessor_flags, ) cxx_library_info = cxx_library_parameterized(ctx, impl_params) @@ -214,6 +226,7 @@ def cxx_python_extension_impl(ctx: AnalysisContext) -> list[Provider]: deps = [d.shared_library_info for d in link_deps], ), linkable_root_info = create_linkable_root( + label =
ctx.label, link_infos = wrap_link_infos( link_infos[LibOutputStyle("pic_archive")], pre_flags = ctx.attrs.linker_flags, @@ -252,20 +265,21 @@ def cxx_python_extension_impl(ctx: AnalysisContext) -> list[Provider]: # Export library info. python_platform = ctx.attrs._python_toolchain[PythonPlatformInfo] - cxx_platform = ctx.attrs._cxx_toolchain[CxxPlatformInfo] + cxx_toolchain = ctx.attrs._cxx_toolchain raw_deps = ctx.attrs.deps raw_deps.extend( - get_platform_attr(python_platform, cxx_platform, ctx.attrs.platform_deps), + get_platform_attr(python_platform, cxx_toolchain, ctx.attrs.platform_deps), ) deps, shared_deps = gather_dep_libraries(raw_deps) - providers.append(create_python_library_info( + library_info = create_python_library_info( ctx.actions, ctx.label, extensions = qualify_srcs(ctx.label, ctx.attrs.base_module, {name: extension}), deps = deps, shared_libraries = shared_deps, src_types = src_type_manifest, - )) + ) + providers.append(library_info) # Omnibus providers @@ -288,4 +302,16 @@ def cxx_python_extension_impl(ctx: AnalysisContext) -> list[Provider]: deps = raw_deps, ) providers.append(linkable_graph) + + providers.append( + create_unix_env_info( + actions = ctx.actions, + env = UnixEnv( + label = ctx.label, + python_libs = [library_info], + ), + deps = raw_deps, + ), + ) + return providers diff --git a/prelude/python/make_py_package.bzl b/prelude/python/make_py_package.bzl index 2b472647709..14e80b05903 100644 --- a/prelude/python/make_py_package.bzl +++ b/prelude/python/make_py_package.bzl @@ -12,19 +12,24 @@ execution load("@prelude//:artifact_tset.bzl", "project_artifacts") load("@prelude//:local_only.bzl", "package_python_locally") +load("@prelude//:paths.bzl", "paths") load( "@prelude//cxx:cxx_library_utility.bzl", "cxx_is_gnu", ) load( - "@prelude//linking:link_info.bzl", - "LinkedObject", # @unused Used as a type + "@prelude//linking:shared_libraries.bzl", + "SharedLibrary", # @unused Used as a type + "gen_shared_libs_action", + "zip_shlibs", ) load("@prelude//os_lookup:defs.bzl", "OsLookup") load("@prelude//utils:arglike.bzl", "ArgLike") load(":compile.bzl", "PycInvalidationMode") load(":interface.bzl", "EntryPoint", "EntryPointKind", "PythonLibraryManifestsInterface") load(":manifest.bzl", "ManifestInfo") # @unused Used as a type +load(":python.bzl", "manifests_to_interface") +load(":python_library.bzl", "gather_dep_libraries") load(":toolchain.bzl", "PackageStyle", "PythonToolchainInfo", "get_package_style") # This represents the input to the creation of a Pex. Manifests provide source @@ -34,7 +39,7 @@ PexModules = record( manifests = field(PythonLibraryManifestsInterface), extensions = field(ManifestInfo | None, None), extra_manifests = field(ManifestInfo | None, None), - debuginfo_manifest = field(ManifestInfo | None, None), + repl_manifests = field(PythonLibraryManifestsInterface | None, None), compile = field(bool, False), ) @@ -42,7 +47,7 @@ PexModules = record( # providers. 
PexProviders = record( default_output = field(Artifact), - other_outputs = list[(ArgLike, str)], + other_outputs = list[ArgLike], other_outputs_prefix = str | None, hidden_resources = list[ArgLike], sub_targets = dict[str, list[Provider]], @@ -60,11 +65,15 @@ def make_py_package_providers( def make_default_info(pex: PexProviders) -> Provider: return DefaultInfo( default_output = pex.default_output, - other_outputs = [a for a, _ in pex.other_outputs] + pex.hidden_resources, + other_outputs = pex.other_outputs + pex.hidden_resources, sub_targets = pex.sub_targets, ) -def make_run_info(pex: PexProviders) -> Provider: +def make_run_info(pex: PexProviders, run_with_inplace: bool = False) -> Provider: + if run_with_inplace and "inplace" in pex.sub_targets: + # If running with inplace, we want to use the RunInfo of the inplace subtarget. + return pex.sub_targets["inplace"][1] + return RunInfo(pex.run_cmd) def _srcs(srcs: list[typing.Any], format = "{}") -> cmd_args: @@ -118,10 +127,11 @@ def make_py_package( package_style: PackageStyle, build_args: list[ArgLike], pex_modules: PexModules, - shared_libraries: dict[str, (LinkedObject, bool)], + shared_libraries: list[(str, SharedLibrary, bool)], main: EntryPoint, hidden_resources: list[ArgLike] | None, - allow_cache_upload: bool) -> PexProviders: + allow_cache_upload: bool, + debuginfo_files: list[(str | (str, SharedLibrary, str), Artifact)] = []) -> PexProviders: """ Passes a standardized set of flags to a `make_py_package` binary to create a python "executable". @@ -144,14 +154,22 @@ def make_py_package( if pex_modules.extensions: srcs.append(pex_modules.extensions.manifest) - preload_libraries = _preload_libraries_args(ctx, shared_libraries) + preload_libraries = _preload_libraries_args( + ctx = ctx, + shared_libraries = [ + (shlib, libdir) + for libdir, shlib, preload in shared_libraries + if preload + ], + ) startup_function = generate_startup_function_loader(ctx) manifest_module = generate_manifest_module(ctx, python_toolchain, srcs) common_modules_args, dep_artifacts, debug_artifacts = _pex_modules_common_args( ctx, pex_modules, [startup_function] if startup_function else [], - {name: lib for name, (lib, _) in shared_libraries.items()}, + [(shlib, libdir) for libdir, shlib, _ in shared_libraries], + debuginfo_files = debuginfo_files, ) default = _make_py_package_impl( @@ -160,7 +178,7 @@ def make_py_package( make_py_package_cmd, package_style, build_args, - shared_libraries, + len(shared_libraries) > 0, preload_libraries, common_modules_args, dep_artifacts, @@ -172,6 +190,52 @@ def make_py_package( output_suffix = "", allow_cache_upload = allow_cache_upload, ) + + # Let's make a shell: build an optional REPL subtarget. + if ctx.attrs.repl_main: + repl_deps, _ = gather_dep_libraries(ctx.attrs.repl_only_deps) + repl_manifests = manifests_to_interface(repl_deps[0].manifests) + + repl_pex_modules = PexModules( + manifests = pex_modules.manifests, + extra_manifests = pex_modules.extra_manifests, + extensions = pex_modules.extensions, + repl_manifests = repl_manifests, + compile = pex_modules.compile, + ) + + repl_common_modules_args, repl_dep_artifacts, repl_debug_artifacts = _pex_modules_common_args( + ctx, + repl_pex_modules, + [startup_function] if startup_function else [], + [(shlib, libdir) for libdir, shlib, _ in shared_libraries], + debuginfo_files = debuginfo_files, + suffix = "_repl", + ) + + default.sub_targets["repl"] = make_py_package_providers( + _make_py_package_impl( + ctx, + python_toolchain, + make_py_package_cmd, + PackageStyle("inplace"),
build_args, + len(shared_libraries) > 0, + preload_libraries, + repl_common_modules_args, + repl_dep_artifacts, + repl_debug_artifacts, + (EntryPointKind("function"), ctx.attrs.repl_main), + hidden_resources, + manifest_module, + pex_modules, + output_suffix = "-repl", + allow_cache_upload = allow_cache_upload, + ), + ) + for style in PackageStyle.values(): pex_providers = default if style == package_style.value else _make_py_package_impl( ctx, @@ -179,7 +243,7 @@ def make_py_package( make_py_package_cmd, PackageStyle(style), build_args, - shared_libraries, + len(shared_libraries) > 0, preload_libraries, common_modules_args, dep_artifacts, @@ -204,11 +268,11 @@ def _make_py_package_impl( make_py_package_cmd: RunInfo | None, package_style: PackageStyle, build_args: list[ArgLike], - shared_libraries: dict[str, (LinkedObject, bool)], + shared_libraries: bool, preload_libraries: cmd_args, common_modules_args: cmd_args, - dep_artifacts: list[(ArgLike, str)], - debug_artifacts: list[(ArgLike, str)], + dep_artifacts: list[ArgLike], + debug_artifacts: list[(str | (str, SharedLibrary, str), ArgLike)], main: EntryPoint, hidden_resources: list[ArgLike] | None, manifest_module: ArgLike | None, @@ -219,6 +283,7 @@ def _make_py_package_impl( standalone = package_style == PackageStyle("standalone") runtime_files = [] + sub_targets = {} if standalone and hidden_resources != None: # constructing this error message is expensive, only do it when we abort analysis error_msg = "standalone builds don't support hidden resources" if output_suffix else _hidden_resources_error_message(ctx.label, hidden_resources) @@ -254,7 +319,7 @@ def _make_py_package_impl( output_suffix, ) - output = ctx.actions.declare_output("{}{}".format(name, python_toolchain.pex_extension)) + output = ctx.actions.declare_output("{}{}".format(name, ctx.attrs.extension or python_toolchain.pex_extension)) bootstrap_args = _pex_bootstrap_args( python_toolchain, @@ -274,7 +339,7 @@ def _make_py_package_impl( # For inplace builds add local artifacts to outputs so they get properly materialized runtime_files.extend(dep_artifacts) - runtime_files.append((symlink_tree_path, symlink_tree_path.short_path)) + runtime_files.append(symlink_tree_path) # For standalone builds, or builds setting make_py_package we generate args for calling make_par.py if standalone or make_py_package_cmd != None: @@ -323,27 +388,61 @@ def _make_py_package_impl( if hidden_resources == None: hidden_resources = [] + if symlink_tree_path != None: + sub_targets["link-tree"] = [DefaultInfo( + default_output = symlink_tree_path, + other_outputs = runtime_files, + sub_targets = {}, + )] + return PexProviders( default_output = output, other_outputs = runtime_files, other_outputs_prefix = symlink_tree_path.short_path if symlink_tree_path != None else None, hidden_resources = hidden_resources, - sub_targets = {}, - run_cmd = cmd_args(run_args).hidden([a for a, _ in runtime_files] + hidden_resources), + sub_targets = sub_targets, + run_cmd = cmd_args( + run_args, + hidden = runtime_files + hidden_resources + [python_toolchain.interpreter], + ), ) -def _debuginfo_subtarget(ctx: AnalysisContext, debug_artifacts: list[(ArgLike, str)]) -> list[Provider]: - out = ctx.actions.write_json("debuginfo.manifest.json", debug_artifacts) - return [DefaultInfo(default_output = out, other_outputs = [a for a, _ in debug_artifacts])] - -def _preload_libraries_args(ctx: AnalysisContext, shared_libraries: dict[str, (LinkedObject, bool)]) -> cmd_args: - preload_libraries_path = ctx.actions.write( - 
"__preload_libraries.txt", - cmd_args([ - "--preload={}".format(name) - for name, (_, preload) in shared_libraries.items() - if preload - ]), +def _debuginfo_subtarget( + ctx: AnalysisContext, + debug_artifacts: list[(str | (str, SharedLibrary, str), ArgLike)]) -> list[Provider]: + for_shared_libs = [] + other = [] + for name, artifact in debug_artifacts: + if type(name) == type(()): + for_shared_libs.append((name[1], (artifact, name[0], name[2]))) + else: + other.append((artifact, name)) + out = gen_shared_libs_action( + actions = ctx.actions, + out = "debuginfo.manifest.json", + shared_libs = [shlib for shlib, _ in for_shared_libs], + gen_action = lambda actions, output, shared_libs: actions.write_json( + output, + [ + (debug, paths.join(libdir, soname + ext)) + for soname, _, (debug, libdir, ext) in zip_shlibs(shared_libs, for_shared_libs) + ] + other, + ), + ) + return [DefaultInfo(default_output = out, other_outputs = [d for _, d in debug_artifacts])] + +def _preload_libraries_args(ctx: AnalysisContext, shared_libraries: list[(SharedLibrary, str)]) -> cmd_args: + preload_libraries_path = gen_shared_libs_action( + actions = ctx.actions, + out = "__preload_libraries.txt", + shared_libs = [shlib for shlib, _ in shared_libraries], + gen_action = lambda actions, output, shared_libs: actions.write( + output, + [ + "--preload={}".format(paths.join(libdir, soname)) + for soname, _, libdir in zip_shlibs(shared_libs, shared_libraries) + ], + ), ) return cmd_args(preload_libraries_path, format = "@{}") @@ -351,7 +450,7 @@ def _pex_bootstrap_args( toolchain: PythonToolchainInfo, main: EntryPoint, output: Artifact, - shared_libraries: dict[str, (LinkedObject, bool)], + shared_libraries: bool, preload_libraries: cmd_args, symlink_tree_path: Artifact | None, package_style: PackageStyle, @@ -369,10 +468,9 @@ def _pex_bootstrap_args( else: cmd.add(["--main-function", main[1]]) if symlink_tree_path != None: - cmd.add(cmd_args(["--modules-dir", symlink_tree_path]).ignore_artifacts()) + cmd.add(cmd_args(["--modules-dir", symlink_tree_path], ignore_artifacts = True)) - if toolchain.main_runner: - cmd.add(["--main-runner", toolchain.main_runner]) + cmd.add(["--main-runner", toolchain.main_runner]) # Package style `inplace_lite` cannot be used with shared libraries if package_style == PackageStyle("inplace_lite") and not shared_libraries: @@ -382,13 +480,18 @@ def _pex_bootstrap_args( if package_style == PackageStyle("standalone") and not zip_safe: cmd.add("--no-zip-safe") + for lib_path in toolchain.native_library_runtime_paths: + cmd.add("--native-library-runtime-path={}".format(lib_path)) + return cmd def _pex_modules_common_args( ctx: AnalysisContext, pex_modules: PexModules, extra_manifests: list[ArgLike], - shared_libraries: dict[str, LinkedObject]) -> (cmd_args, list[(ArgLike, str)], list[(ArgLike, str)]): + shared_libraries: list[(SharedLibrary, str)], + debuginfo_files: list[(str | (str, SharedLibrary, str), Artifact)], + suffix: str = "") -> (cmd_args, list[ArgLike], list[(str | (str, SharedLibrary, str), ArgLike)]): srcs = [] src_artifacts = [] deps = [] @@ -405,89 +508,124 @@ def _pex_modules_common_args( srcs.append(pex_modules.extra_manifests.manifest) src_artifacts.extend(pex_modules.extra_manifests.artifacts) + if pex_modules.repl_manifests: + srcs.extend(pex_modules.repl_manifests.src_manifests()) + src_artifacts.extend(pex_modules.repl_manifests.src_artifacts_with_paths()) + if extra_manifests: srcs.extend(extra_manifests) - deps.extend(src_artifacts) + deps.extend([a[0] for a in 
src_artifacts]) resources = pex_modules.manifests.resource_manifests() - deps.extend(pex_modules.manifests.resource_artifacts_with_paths()) + deps.extend([a[0] for a in pex_modules.manifests.resource_artifacts_with_paths()]) src_manifests_path = ctx.actions.write( - "__src_manifests.txt", + "__src_manifests{}.txt".format(suffix), _srcs(srcs, format = "--module-manifest={}"), ) resource_manifests_path = ctx.actions.write( - "__resource_manifests.txt", + "__resource_manifests{}.txt".format(suffix), _srcs(resources, format = "--resource-manifest={}"), ) - native_libraries = [s.output for s in shared_libraries.values()] - native_library_srcs_path = ctx.actions.write( - "__native_libraries___srcs.txt", - _srcs(native_libraries, format = "--native-library-src={}"), - ) - native_library_dests_path = ctx.actions.write( - "__native_libraries___dests.txt", - ["--native-library-dest={}".format(lib) for lib in shared_libraries], + native_libraries = gen_shared_libs_action( + actions = ctx.actions, + out = "__native_libraries{}__.txt".format(suffix), + shared_libs = [shlib for shlib, _ in shared_libraries], + gen_action = lambda actions, output, shared_libs: actions.write( + output, + cmd_args( + _srcs( + [shlib.lib.output for shlib in shared_libs.values()], + format = "--native-library-src={}", + ), + [ + "--native-library-dest={}".format(paths.join(libdir, soname)) + for soname, _, libdir in zip_shlibs(shared_libs, shared_libraries) + ], + ), + ), ) - src_manifest_args = cmd_args(src_manifests_path).hidden(srcs) - resource_manifest_args = cmd_args(resource_manifests_path).hidden(resources) - native_library_srcs_args = cmd_args(native_library_srcs_path) + src_manifest_args = cmd_args(src_manifests_path, hidden = srcs) + resource_manifest_args = cmd_args(resource_manifests_path, hidden = resources) cmd = cmd_args() cmd.add(cmd_args(src_manifest_args, format = "@{}")) cmd.add(cmd_args(resource_manifest_args, format = "@{}")) - cmd.add(cmd_args(native_library_srcs_args, format = "@{}")) - cmd.add(cmd_args(native_library_dests_path, format = "@{}")) + cmd.add(cmd_args(native_libraries, format = "@{}")) - if pex_modules.debuginfo_manifest: - debuginfo_files = pex_modules.debuginfo_manifest.artifacts + if debuginfo_files: debuginfo_srcs_path = ctx.actions.write( - "__debuginfo___srcs.txt", - _srcs([src for src, _ in debuginfo_files], format = "--debuginfo-src={}"), + "__debuginfo___srcs{}.txt".format(suffix), + _srcs([src for _, src in debuginfo_files], format = "--debuginfo-src={}"), ) debuginfo_srcs_args = cmd_args(debuginfo_srcs_path) cmd.add(cmd_args(debuginfo_srcs_args, format = "@{}")) - debug_artifacts.extend(debuginfo_files) + for name, artifact in debuginfo_files: + if type(name) != type(""): + libdir, shlib, ext = name + name = paths.join(libdir, shlib.soname.ensure_str() + ext) + debug_artifacts.append((name, artifact)) if ctx.attrs.package_split_dwarf_dwp: if ctx.attrs.strip_libpar == "extract" and get_package_style(ctx) == PackageStyle("standalone") and cxx_is_gnu(ctx): - # rename to match extracted debuginfo package - dwp = [(s.dwp, "{}.debuginfo.dwp".format(n)) for n, s in shared_libraries.items() if s.dwp != None] + dwp_ext = ".debuginfo.dwp" else: - dwp = [(s.dwp, "{}.dwp".format(n)) for n, s in shared_libraries.items() if s.dwp != None] - dwp_srcs_path = ctx.actions.write( - "__dwp___srcs.txt", - _srcs([src for src, _ in dwp], format = "--dwp-src={}"), - ) - dwp_dests_path = ctx.actions.write( - "__dwp___dests.txt", - _srcs([dest for _, dest in dwp], format = "--dwp-dest={}"), + dwp_ext = 
".dwp" + dwp_args = gen_shared_libs_action( + actions = ctx.actions, + out = "__dwp{}__.txt".format(suffix), + shared_libs = [shlib for shlib, _ in shared_libraries], + gen_action = lambda actions, output, shared_libs: actions.write( + output, + cmd_args( + _srcs( + [ + shlib.lib.dwp + for shlib in shared_libs.values() + if shlib.lib.dwp != None + ], + format = "--dwp-src={}", + ), + _srcs( + [ + paths.join(libdir, soname + dwp_ext) + for soname, shlib, libdir in zip_shlibs(shared_libs, shared_libraries) + if shlib.lib.dwp != None + ], + format = "--dwp-dest={}", + ), + ), + ), ) - dwp_srcs_args = cmd_args(dwp_srcs_path) - cmd.add(cmd_args(dwp_srcs_args, format = "@{}")) - cmd.add(cmd_args(dwp_dests_path, format = "@{}")) + cmd.add(cmd_args(dwp_args, format = "@{}")) - debug_artifacts.extend(dwp) + for shlib, libdir in shared_libraries: + if shlib.lib.dwp != None: + debug_artifacts.append(((libdir, shlib, dwp_ext), shlib.lib.dwp)) - deps.extend([(lib.output, name) for name, lib in shared_libraries.items()]) + for shlib, _ in shared_libraries: + deps.append(shlib.lib.output) external_debug_info = project_artifacts( ctx.actions, - [lib.external_debug_info for lib in shared_libraries.values()], + [ + shlib.lib.external_debug_info + for shlib, _ in shared_libraries + ], ) # HACK: external_debug_info has an empty path - debug_artifacts.extend([(d, "") for d in external_debug_info]) + debug_artifacts.extend([("", d) for d in external_debug_info]) return (cmd, deps, debug_artifacts) def _pex_modules_args( ctx: AnalysisContext, common_args: cmd_args, - dep_artifacts: list[(ArgLike, str)], - debug_artifacts: list[(ArgLike, str)], + dep_artifacts: list[ArgLike], + debug_artifacts: list[(str | (str, SharedLibrary, str), ArgLike)], symlink_tree_path: Artifact | None, manifest_module: ArgLike | None, pex_modules: PexModules, @@ -498,16 +636,18 @@ def _pex_modules_args( runtime (this might be empty for e.g. a standalone pex). """ - cmd = cmd_args() - cmd.add(common_args) + cmd = [] + hidden = [] + + cmd.append(common_args) if manifest_module != None: - cmd.add(cmd_args(manifest_module, format = "--module-manifest={}")) + cmd.append(cmd_args(manifest_module, format = "--module-manifest={}")) if pex_modules.compile: pyc_mode = PycInvalidationMode("UNCHECKED_HASH") if symlink_tree_path == None else PycInvalidationMode("CHECKED_HASH") bytecode_manifests = pex_modules.manifests.bytecode_manifests(pyc_mode) - dep_artifacts.extend(pex_modules.manifests.bytecode_artifacts_with_paths(pyc_mode)) + dep_artifacts.extend([a[0] for a in pex_modules.manifests.bytecode_artifacts_with_paths(pyc_mode)]) bytecode_manifests_path = ctx.actions.write( "__bytecode_manifests{}.txt".format(output_suffix), @@ -516,19 +656,19 @@ def _pex_modules_args( format = "--module-manifest={}", ), ) - cmd.add(cmd_args(bytecode_manifests_path, format = "@{}")) - cmd.hidden(bytecode_manifests) + cmd.append(cmd_args(bytecode_manifests_path, format = "@{}")) + hidden.append(bytecode_manifests) if symlink_tree_path != None: - cmd.add(["--modules-dir", symlink_tree_path.as_output()]) + cmd.extend(["--modules-dir", symlink_tree_path.as_output()]) else: # Accumulate all the artifacts we depend on. Only add them to the command # if we are not going to create symlinks. 
- cmd.hidden([a for a, _ in dep_artifacts]) + hidden.append(dep_artifacts) - cmd.hidden([a for a, _ in debug_artifacts]) + hidden.extend([s for _, s in debug_artifacts]) - return cmd + return cmd_args(cmd, hidden = hidden) def _hidden_resources_error_message(current_target: Label, hidden_resources: list[ArgLike] | None) -> str: """ @@ -549,7 +689,8 @@ def _hidden_resources_error_message(current_target: Label, hidden_resources: lis msg = ( "Cannot package hidden srcs/resources in a standalone python_binary. " + - 'Eliminate resources in non-Python dependencies of this python binary, use `package_style = "inplace"`, ' + + 'Eliminate resources in non-Python dependencies of this python binary, set `package_style = "inplace"` on ' + + str(current_target.raw_target()) + ", " + 'use `strip_mode="full"` or turn off Split DWARF `-c fbcode.split-dwarf=false` on C++ binary resources.\n' ) @@ -578,7 +719,7 @@ def generate_startup_function_loader(ctx: AnalysisContext) -> ArgLike: else: startup_functions_list = "\n".join( [ - '"' + startup_function + '",' + "'''" + startup_function + "'''," for _, startup_function in sorted(ctx.attrs.manifest_module_entries.get("startup_functions", {}).items()) ], ) @@ -589,24 +730,36 @@ def generate_startup_function_loader(ctx: AnalysisContext) -> ArgLike: import importlib import warnings +VARS = {vars} STARTUP_FUNCTIONS=[{startup_functions_list}] +VARS["_dearg"] = lambda *args, **kwargs: (args, kwargs) + + def load_startup_functions(): - for func in STARTUP_FUNCTIONS: - mod, sep, func = func.partition(":") + for name in STARTUP_FUNCTIONS: + mod, sep, func = name.partition(":") if sep: try: + func, _, args = func.partition("(") + args, kwargs = eval("_dearg(" + args, VARS) if args else ((), {{}}) module = importlib.import_module(mod) - getattr(module, func)() + getattr(module, func)(*args, **kwargs) except Exception as e: # TODO: Ignoring errors for now. warnings.warn( - "Startup function %s (%s:%s) not executed: %s" - % (mod, name, func, e), + "Startup function '%s' (%s:%s) not executed: %s" + % (func, mod, func, e), stacklevel=1, ) - """.format(startup_functions_list = startup_functions_list), + """.format( + startup_functions_list = startup_functions_list, + vars = { + "label": repr(ctx.label), + "name": ctx.attrs.name, + }, + ), ) return ctx.actions.write_json( "manifest/startup_function_loader.manifest", @@ -635,11 +788,13 @@ def generate_manifest_module( "__module_manifests.txt", _srcs(src_manifests, format = "--module-manifest={}"), ) - cmd = cmd_args(python_toolchain.make_py_package_manifest_module) - cmd.add(["--manifest-entries", entries_json]) - cmd.add(cmd_args(src_manifests_path, format = "@{}")) - cmd.hidden(src_manifests) - cmd.add(["--output", module.as_output()]) + cmd = cmd_args( + python_toolchain.make_py_package_manifest_module, + ["--manifest-entries", entries_json], + cmd_args(src_manifests_path, format = "@{}"), + ["--output", module.as_output()], + hidden = src_manifests, + ) ctx.actions.run(cmd, category = "par", identifier = "manifest-module") json_entries_output = ctx.actions.declare_output("manifest/__manifest__.json") diff --git a/prelude/python/manifest.bzl b/prelude/python/manifest.bzl index 7a832ac38b4..3d89ea2f5f8 100644 --- a/prelude/python/manifest.bzl +++ b/prelude/python/manifest.bzl @@ -6,6 +6,11 @@ # of this source tree. 
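Note: `gen_shared_libs_action` and `zip_shlibs` come from linking/shared_libraries.bzl and their definitions are not shown in this diff; the usages above and below assume roughly this shape — the helper resolves sonames for a list of `SharedLibrary` values, hands a `{soname: shlib}` dict to a callback that writes the output, and returns that output artifact. An illustrative call, with a hypothetical `shlibs` list:

    manifest = gen_shared_libs_action(
        actions = ctx.actions,
        out = "libs.manifest",
        shared_libs = shlibs,
        gen_action = lambda actions, output, shared_libs: actions.write_json(
            output,
            [(soname, shlib.lib.output) for soname, shlib in shared_libs.items()],
        ),
    )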
load("@prelude//:artifact_tset.bzl", "project_artifacts") +load( + "@prelude//linking:shared_libraries.bzl", + "SharedLibrary", + "gen_shared_libs_action", +) load("@prelude//utils:arglike.bzl", "ArgLike") load(":toolchain.bzl", "PythonToolchainInfo") @@ -82,6 +87,33 @@ def create_manifest_for_source_map( [(dest, artifact, origin) for dest, artifact in srcs.items()], ) +def get_srcs_from_manifest( + src_manifest: [ManifestInfo, None]) -> list[Artifact]: + return [a for (a, _) in src_manifest.artifacts] if src_manifest else [] + +def create_manifest_for_shared_libs( + actions: AnalysisActions, + name: str, + shared_libs: list[SharedLibrary]) -> ManifestInfo: + """ + Generate a source manifest for the given list of sources. + """ + return ManifestInfo( + manifest = gen_shared_libs_action( + actions = actions, + out = name + ".manifest", + shared_libs = shared_libs, + gen_action = lambda actions, output, shared_libs: actions.write_json( + output, + [ + (soname, shlib.lib.output, name) + for soname, shlib in shared_libs.items() + ], + ), + ), + artifacts = [(shlib.lib.output, "") for shlib in shared_libs], + ) + def create_manifest_for_source_dir( ctx: AnalysisContext, param: str, diff --git a/prelude/python/native_python_util.bzl b/prelude/python/native_python_util.bzl index e049465d4e7..b5715536f33 100644 --- a/prelude/python/native_python_util.bzl +++ b/prelude/python/native_python_util.bzl @@ -178,8 +178,8 @@ def _write_syms_file( nm = cxx_toolchain.binary_utilities_info.nm symbols_file = ctx.actions.declare_output(name) - objects_argsfile = ctx.actions.write(name + ".objects.argsfile", objects) - objects_args = cmd_args(objects_argsfile).hidden(objects) + objects_argsfile = ctx.actions.write(name + ".py_objects_argsfile", objects) + objects_args = cmd_args(objects_argsfile, hidden = objects) script_env = { "NM": nm, diff --git a/prelude/python/prebuilt_python_library.bzl b/prelude/python/prebuilt_python_library.bzl index 3a1e1a2e30c..8b897e5fe9d 100644 --- a/prelude/python/prebuilt_python_library.bzl +++ b/prelude/python/prebuilt_python_library.bzl @@ -15,13 +15,29 @@ load( "get_excluded", "get_roots", ) +load( + "@prelude//cxx:preprocessor.bzl", + "CPreprocessor", + "CPreprocessorArgs", + "cxx_inherited_preprocessor_infos", + "cxx_merge_cpreprocessors", + "format_system_include_arg", +) load( "@prelude//linking:linkable_graph.bzl", "create_linkable_graph", "create_linkable_graph_node", ) +load( + "@prelude//third-party:build.bzl", + "create_third_party_build_root", + "prefix_from_label", + "project_from_label", +) +load("@prelude//third-party:providers.bzl", "ThirdPartyBuild", "third_party_build_info") +load("@prelude//unix:providers.bzl", "UnixEnv", "create_unix_env_info") load(":compile.bzl", "compile_manifests") -load(":manifest.bzl", "create_manifest_for_source_dir") +load(":manifest.bzl", "ManifestInfo", "create_manifest_for_source_dir") load( ":python_library.bzl", "create_python_library_info", @@ -34,8 +50,29 @@ def prebuilt_python_library_impl(ctx: AnalysisContext) -> list[Provider]: # Extract prebuilt wheel and wrap in python library provider. 
# TODO(nmj): Make sure all attrs are used if necessary, esp compile + entry_points = ctx.actions.declare_output("entry_points.manifest") + entry_points_dir = ctx.actions.declare_output("__entry_points__", dir = True) extracted_src = ctx.actions.declare_output("{}_extracted".format(ctx.label.name), dir = True) - ctx.actions.run([ctx.attrs._extract[RunInfo], ctx.attrs.binary_src, "--output", extracted_src.as_output()], category = "py_extract_prebuilt_library") + cmd = cmd_args( + ctx.attrs._extract[RunInfo], + ctx.attrs.binary_src, + "--output", + extracted_src.as_output(), + "--entry-points-manifest", + entry_points.as_output(), + "--entry-points", + entry_points_dir.as_output(), + ) + if ctx.attrs.strip_soabi_tags: + cmd.add("--strip-soabi-tags") + inferred_cxx_header_dirs = None + if ctx.attrs.infer_cxx_header_dirs: + inferred_cxx_header_dirs = ctx.actions.declare_output("__cxx_header_dirs__.txt") + cmd.add( + "--cxx-header-dirs", + inferred_cxx_header_dirs.as_output(), + ) + ctx.actions.run(cmd, category = "py_extract_prebuilt_library") deps, shared_deps = gather_dep_libraries(ctx.attrs.deps) src_manifest = create_manifest_for_source_dir(ctx, "binary_src", extracted_src, exclude = "\\.pyc$") bytecode = compile_manifests(ctx, [src_manifest]) @@ -50,6 +87,11 @@ def prebuilt_python_library_impl(ctx: AnalysisContext) -> list[Provider]: ) providers.append(library_info) + entry_points_manifest = ManifestInfo( + manifest = entry_points, + artifacts = [(entry_points_dir, "")], + ) + # Create, augment and provide the linkable graph. linkable_graph = create_linkable_graph( ctx, @@ -71,4 +113,100 @@ def prebuilt_python_library_impl(ctx: AnalysisContext) -> list[Provider]: deps = ctx.attrs.deps, ))) + # Allow third-party-build rules to depend on Python rules. + tp_project = project_from_label(ctx.label) + tp_prefix = prefix_from_label(ctx.label) + providers.append( + third_party_build_info( + actions = ctx.actions, + build = ThirdPartyBuild( + project = tp_project, + prefix = tp_prefix, + root = create_third_party_build_root( + ctx = ctx, + paths = [ + ("lib/python", extracted_src), + ], + manifests = [ + ("bin", entry_points_manifest), + ], + ), + manifest = ctx.actions.write_json( + "third_party_build_manifest.json", + dict( + prefix = tp_prefix, + project = tp_project, + py_lib_paths = ["lib/python"], + ), + ), + ), + deps = ctx.attrs.deps, + ), + ) + + # Unix env provider. + providers.append( + create_unix_env_info( + actions = ctx.actions, + env = UnixEnv( + label = ctx.label, + python_libs = [library_info], + binaries = [entry_points_manifest], + ), + deps = ctx.attrs.deps, + ), + ) + + # If this prebuilt wheel contains headers, export them via a C++ provider. 
+ pp_args = [] + if ctx.attrs.cxx_header_dirs: + for header_dir in ctx.attrs.cxx_header_dirs: + pp_args.append( + format_system_include_arg( + cmd_args(extracted_src.project(header_dir)), + "clang", + ), + ) + if inferred_cxx_header_dirs != None: + pp_argsfile = ctx.actions.declare_output("__cxx_header_dirs__.py_cxx_header_argsfile") + + def write_argsfile(actions, header_dirs, output): + lines = [] + for header_dir in header_dirs.read_string().splitlines(): + lines.append(format_system_include_arg( + cmd_args(extracted_src.project(header_dir)), + "clang", + )) + actions.write(output, lines) + + ctx.actions.dynamic_output( + dynamic = [inferred_cxx_header_dirs], + inputs = [], + outputs = [pp_argsfile.as_output()], + f = lambda ctx, artifacts, outputs: write_argsfile( + ctx.actions, + artifacts[inferred_cxx_header_dirs], + outputs[pp_argsfile], + ), + ) + pp_args.append( + cmd_args( + pp_argsfile, + format = "@{}", + hidden = [extracted_src], + ), + ) + if pp_args: + providers.append(cxx_merge_cpreprocessors( + ctx = ctx, + own = [ + CPreprocessor( + args = CPreprocessorArgs( + args = pp_args, + ), + ), + ], + xs = cxx_inherited_preprocessor_infos(ctx.attrs.deps), + )) + return providers diff --git a/prelude/python/python.bzl b/prelude/python/python.bzl index 1864bdc1111..000cb6d6d2c 100644 --- a/prelude/python/python.bzl +++ b/prelude/python/python.bzl @@ -5,14 +5,11 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxPlatformInfo") load("@prelude//linking:shared_libraries.bzl", "traverse_shared_library_info") load("@prelude//utils:arglike.bzl", "ArgLike") -load("@prelude//utils:utils.bzl", "flatten") load(":compile.bzl", "PycInvalidationMode") load(":interface.bzl", "PythonLibraryInterface", "PythonLibraryManifestsInterface") load(":manifest.bzl", "ManifestInfo") -load(":toolchain.bzl", "PythonPlatformInfo", "get_platform_attr") PythonLibraryManifests = record( label = field(Label), @@ -95,24 +92,28 @@ _BYTECODE_PROJ_PREFIX = { PycInvalidationMode("UNCHECKED_HASH"): "bytecode", } +args_projections = { + "dep_artifacts": _dep_artifacts, + "dep_manifests": _dep_manifests, + "hidden_resources": _hidden_resources, + "resource_artifacts": _resource_artifacts, + "resource_manifests": _resource_manifests, + "source_artifacts": _source_artifacts, + "source_manifests": _source_manifests, + "source_type_artifacts": _source_type_artifacts, + "source_type_manifests": _source_type_manifests, +} +args_projections.update({ + "{}_artifacts".format(prefix): _bytecode_artifacts(mode) + for mode, prefix in _BYTECODE_PROJ_PREFIX.items() +}) +args_projections.update({ + "{}_manifests".format(prefix): _bytecode_manifests(mode) + for mode, prefix in _BYTECODE_PROJ_PREFIX.items() +}) + PythonLibraryManifestsTSet = transitive_set( - args_projections = dict({ - "dep_artifacts": _dep_artifacts, - "dep_manifests": _dep_manifests, - "hidden_resources": _hidden_resources, - "resource_artifacts": _resource_artifacts, - "resource_manifests": _resource_manifests, - "source_artifacts": _source_artifacts, - "source_manifests": _source_manifests, - "source_type_artifacts": _source_type_artifacts, - "source_type_manifests": _source_type_manifests, - }.items() + { - "{}_artifacts".format(prefix): _bytecode_artifacts(mode) - for mode, prefix in _BYTECODE_PROJ_PREFIX.items() - }.items() + { - "{}_manifests".format(prefix): _bytecode_manifests(mode) - for mode, prefix in _BYTECODE_PROJ_PREFIX.items() - }.items()), + 
args_projections = args_projections, json_projections = { "source_type_manifests_json": _source_type_manifest_jsons, }, @@ -152,11 +153,3 @@ def manifests_to_interface(manifests: PythonLibraryManifestsTSet) -> PythonLibra resource_artifacts = lambda: [manifests.project_as_args("resource_artifacts")], resource_artifacts_with_paths = lambda: [(a, p) for m in manifests.traverse() if m != None and m.resources != None for a, p in m.resources[0].artifacts], ) - -def get_python_deps(ctx: AnalysisContext): - python_platform = ctx.attrs._python_toolchain[PythonPlatformInfo] - cxx_platform = ctx.attrs._cxx_toolchain[CxxPlatformInfo] - return flatten( - [ctx.attrs.deps] + - get_platform_attr(python_platform, cxx_platform, ctx.attrs.platform_deps), - ) diff --git a/prelude/python/python_binary.bzl b/prelude/python/python_binary.bzl index f8d86329c0a..a1473c05be8 100644 --- a/prelude/python/python_binary.bzl +++ b/prelude/python/python_binary.bzl @@ -10,7 +10,6 @@ load( "ArtifactGroupInfo", "ArtifactOutputs", # @unused Used as a type ) -load("@prelude//cxx:compile.bzl", "CxxSrcWithFlags") load("@prelude//cxx:cxx.bzl", "create_shared_lib_link_group_specs") load("@prelude//cxx:cxx_context.bzl", "get_cxx_toolchain_info") load("@prelude//cxx:cxx_executable.bzl", "cxx_executable") @@ -18,14 +17,15 @@ load( "@prelude//cxx:cxx_library_utility.bzl", "cxx_is_gnu", ) -load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxPlatformInfo") +load("@prelude//cxx:cxx_sources.bzl", "CxxSrcWithFlags") +load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo") load( "@prelude//cxx:cxx_types.bzl", "CxxRuleConstructorParams", ) load("@prelude//cxx:cxx_utility.bzl", "cxx_attrs_get_allow_cache_upload") load( - "@prelude//cxx:groups.bzl", + "@prelude//cxx:groups_types.bzl", "Group", "GroupAttrs", "GroupMapping", @@ -34,11 +34,14 @@ load( load("@prelude//cxx:headers.bzl", "cxx_get_regular_cxx_headers_layout") load( "@prelude//cxx:link_groups.bzl", - "LinkGroupInfo", # @unused Used as a type "LinkGroupLibSpec", "build_link_group_info", "get_link_group_info", ) +load( + "@prelude//cxx:link_groups_types.bzl", + "LinkGroupInfo", # @unused Used as a type +) load("@prelude//cxx:linker.bzl", "get_rpath_origin") load( "@prelude//cxx:omnibus.bzl", @@ -55,7 +58,6 @@ load( ) load( "@prelude//linking:link_info.bzl", - "Linkage", "LinkedObject", ) load( @@ -69,8 +71,15 @@ load( "LinkableProviders", # @unused Used as a type "linkables", ) -load("@prelude//linking:shared_libraries.bzl", "merge_shared_libraries", "traverse_shared_library_info") +load( + "@prelude//linking:shared_libraries.bzl", + "SharedLibrary", + "create_shlib", + "merge_shared_libraries", + "traverse_shared_library_info", +) load("@prelude//linking:strip.bzl", "strip_debug_with_gnu_debuglink") +load("@prelude//linking:types.bzl", "Linkage") load("@prelude//utils:utils.bzl", "flatten", "value_or") load("@prelude//paths.bzl", "paths") load("@prelude//resources.bzl", "gather_resources") @@ -81,7 +90,7 @@ load( "EntryPointKind", "PythonLibraryInterface", ) -load(":make_py_package.bzl", "PexModules", "PexProviders", "make_default_info", "make_py_package") +load(":make_py_package.bzl", "PexModules", "PexProviders", "make_default_info", "make_py_package", "make_run_info") load( ":manifest.bzl", "create_dep_manifest_for_source_map", @@ -94,6 +103,7 @@ load( ":python_library.bzl", "create_python_library_info", "gather_dep_libraries", + "py_attr_resources", "py_resources", "qualify_srcs", ) @@ -161,11 +171,12 @@ def _get_root_link_group_specs( name = dep.linkable_root_info.name, 
is_shared_lib = True, root = dep.linkable_root_info, + label = dep.linkable_graph.nodes.value.label, group = Group( name = dep.linkable_root_info.name, mappings = [ GroupMapping( - root = dep.linkable_graph.nodes.value.label, + roots = [dep.linkable_graph.nodes.value.label], traversal = Traversal("node"), ), ], @@ -188,7 +199,7 @@ def _get_root_link_group_specs( name = name, mappings = [ GroupMapping( - root = extension.linkable_graph.nodes.value.label, + roots = [extension.linkable_graph.nodes.value.label], traversal = Traversal("node"), ), ], @@ -202,15 +213,6 @@ def _get_root_link_group_specs( return specs -def _split_debuginfo(ctx, data: dict[str, (typing.Any, Label | bool)]) -> (dict[str, (LinkedObject, Label | bool)], dict[str, Artifact]): - debuginfo_artifacts = {} - transformed = {} - for name, (artifact, extra) in data.items(): - stripped_binary, debuginfo = strip_debug_with_gnu_debuglink(ctx, name, artifact.unstripped_output) - transformed[name] = LinkedObject(output = stripped_binary, unstripped_output = artifact.unstripped_output, dwp = artifact.dwp), extra - debuginfo_artifacts[name + ".debuginfo"] = debuginfo - return transformed, debuginfo_artifacts - def _get_shared_only_groups(shared_only_libs: list[LinkableProviders]) -> list[Group]: """ Create link group mappings for shared-only libs that'll force the link to @@ -228,7 +230,7 @@ def _get_shared_only_groups(shared_only_libs: list[LinkableProviders]) -> list[G name = str(dep.linkable_graph.nodes.value.label.raw_target()), mappings = [ GroupMapping( - root = dep.linkable_graph.nodes.value.label, + roots = [dep.linkable_graph.nodes.value.label], traversal = Traversal("node"), preferred_linkage = Linkage("shared"), ), @@ -325,12 +327,12 @@ def python_executable( # TODO(nmj): See if people are actually setting cxx_platform here. Really # feels like it should be a property of the python platform python_platform = ctx.attrs._python_toolchain[PythonPlatformInfo] - cxx_platform = ctx.attrs._cxx_toolchain[CxxPlatformInfo] + cxx_toolchain = ctx.attrs._cxx_toolchain raw_deps = ctx.attrs.deps raw_deps.extend(flatten( - get_platform_attr(python_platform, cxx_platform, ctx.attrs.platform_deps), + get_platform_attr(python_platform, cxx_toolchain, ctx.attrs.platform_deps), )) # `preload_deps` is used later to configure `LD_PRELOAD` environment variable, @@ -427,12 +429,13 @@ def create_dep_report( main: str, library_info: PythonLibraryInfo) -> DefaultInfo: out = ctx.actions.declare_output("dep-report.json") - cmd = cmd_args() - cmd.add(python_toolchain.traverse_dep_manifest) - cmd.add(cmd_args(main, format = "--main={}")) - cmd.add(cmd_args(out.as_output(), format = "--outfile={}")) - cmd.add(cmd_args(library_info.manifests.project_as_args("dep_manifests"))) - cmd.hidden(library_info.manifests.project_as_args("dep_artifacts")) + cmd = cmd_args( + python_toolchain.traverse_dep_manifest, + cmd_args(main, format = "--main={}"), + cmd_args(out.as_output(), format = "--outfile={}"), + cmd_args(library_info.manifests.project_as_args("dep_manifests")), + hidden = library_info.manifests.project_as_args("dep_artifacts"), + ) ctx.actions.run(cmd, category = "write_dep_report") return DefaultInfo(default_output = out) @@ -451,11 +454,6 @@ def _convert_python_library_to_executable( # Convert preloaded deps to a set of their names to be loaded by. 
preload_labels = {d.label: None for d in ctx.attrs.preload_deps} - preload_names = { - name: None - for name, shared_lib in library.shared_libraries().items() - if shared_lib.label in preload_labels - } extensions = {} extra_artifacts = {} @@ -463,9 +461,16 @@ if manifest.extensions: _merge_extensions(extensions, manifest.label, manifest.extensions) - # If we're using omnibus linking, re-link libraries and extensions and - # update the libraries we'll pull into the final binary. - if _link_strategy(ctx) == NativeLinkStrategy("merged"): + if ctx.attrs._cxx_toolchain.get(CxxToolchainInfo) == None: + # In fat target platforms, there may not be a CXX toolchain available. + shared_libs = [ + ("", shared_lib) + for shared_lib in library.shared_libraries() + ] + elif _link_strategy(ctx) == NativeLinkStrategy("merged"): + # If we're using omnibus linking, re-link libraries and extensions and + # update the libraries we'll pull into the final binary. + # Collect omnibus info from deps. linkable_graph = create_linkable_graph( ctx, @@ -494,7 +499,7 @@ dest: (omnibus_libs.roots[label].shared_library, label) for dest, (_, label) in extensions.items() } - native_libs = omnibus_libs.libraries + shared_libs = [("", shlib) for shlib in omnibus_libs.libraries] omnibus_providers = [] @@ -564,7 +569,7 @@ ] extra_preprocessors = [] if ctx.attrs.par_style == "native": - extra_preprocessors.append(CPreprocessor(relative_args = CPreprocessorArgs(args = ["-DNATIVE_PAR_STYLE=1"]))) + extra_preprocessors.append(CPreprocessor(args = CPreprocessorArgs(args = ["-DNATIVE_PAR_STYLE=1"]))) # All deps inolved in the link. link_deps = ( @@ -623,14 +628,26 @@ linkables(ctx.attrs.link_group_deps) ), exe_allow_cache_upload = allow_cache_upload, + compiler_flags = ctx.attrs.compiler_flags, + lang_compiler_flags = ctx.attrs.lang_compiler_flags, + platform_compiler_flags = ctx.attrs.platform_compiler_flags, + lang_platform_compiler_flags = ctx.attrs.lang_platform_compiler_flags, + preprocessor_flags = ctx.attrs.preprocessor_flags, + lang_preprocessor_flags = ctx.attrs.lang_preprocessor_flags, + platform_preprocessor_flags = ctx.attrs.platform_preprocessor_flags, + lang_platform_preprocessor_flags = ctx.attrs.lang_platform_preprocessor_flags, ) executable_info = cxx_executable(ctx, impl_params) extra["native-executable"] = [DefaultInfo(default_output = executable_info.binary, sub_targets = executable_info.sub_targets)] # Add sub-targets for libs. - for name, lib in executable_info.shared_libs.items(): - extra[name] = [DefaultInfo(default_output = lib.output)] + for shlib in executable_info.shared_libs: + # TODO(agallagher): There appear to be pre-existing soname conflicts + # when building this (when using link groups), which prevents using + # `with_unique_str_sonames`. + if shlib.soname.is_str: + extra[shlib.soname.ensure_str()] = [DefaultInfo(default_output = shlib.lib.output)] for name, group in executable_info.auto_link_groups.items(): extra[name] = [DefaultInfo(default_output = group.output)] @@ -646,23 +663,29 @@ # Put native libraries into the runtime location, as we need to unpack # potentially all of them before startup.
- native_libs = { - paths.join("runtime", "lib", name): lib - for name, lib in executable_info.shared_libs.items() - } - preload_names = [paths.join("runtime", "lib", n) for n in preload_names] + shared_libs = [("runtime/lib", s) for s in executable_info.shared_libs] # TODO expect(len(executable_info.runtime_files) == 0, "OH NO THERE ARE RUNTIME FILES") extra_artifacts.update(dict(extension_info.artifacts)) - native_libs["runtime/bin/{}".format(ctx.attrs.executable_name)] = LinkedObject( - output = executable_info.binary, - unstripped_output = executable_info.binary, - dwp = executable_info.dwp, - ) + shared_libs.append(( + "runtime/bin", + create_shlib( + soname = ctx.attrs.executable_name, + label = ctx.label, + lib = LinkedObject( + output = executable_info.binary, + unstripped_output = executable_info.binary, + dwp = executable_info.dwp, + ), + ), + )) extra_artifacts["static_extension_finder.py"] = ctx.attrs.static_extension_finder else: - native_libs = {name: shared_lib.lib for name, shared_lib in library.shared_libraries().items()} + shared_libs = [ + ("", shared_lib) + for shared_lib in library.shared_libraries() + ] if dbg_source_db: extra_artifacts["dbg-db.json"] = dbg_source_db.default_outputs[0] @@ -672,28 +695,71 @@ def _convert_python_library_to_executable( extra_manifests = create_manifest_for_source_map(ctx, "extra_manifests", extra_artifacts) - shared_libraries = {} - debuginfo_artifacts = {} - # Create the map of native libraries to their artifacts and whether they # need to be preloaded. Note that we merge preload deps into regular deps # above, before gathering up all native libraries, so we're guaranteed to # have all preload libraries (and their transitive deps) here. - for name, lib in native_libs.items(): - shared_libraries[name] = lib, name in preload_names + shared_libs = [ + (libdir, shlib, shlib.label in preload_labels) + for libdir, shlib in shared_libs + ] # Strip native libraries and extensions and update the .gnu_debuglink references if we are extracting # debug symbols from the par + debuginfo_files = [] + debuginfos = {} if ctx.attrs.strip_libpar == "extract" and package_style == PackageStyle("standalone") and cxx_is_gnu(ctx): - shared_libraries, library_debuginfo = _split_debuginfo(ctx, shared_libraries) - extensions, extension_debuginfo = _split_debuginfo(ctx, extensions) - debuginfo_artifacts = library_debuginfo | extension_debuginfo + stripped_shlibs = [] + for libdir, shlib, preload in shared_libs: + name = paths.join( + libdir, + value_or( + shlib.soname.as_str(), + shlib.lib.unstripped_output.short_path, + ), + ) + existing = debuginfos.get(name) + if existing == None: + stripped, debuginfo = strip_debug_with_gnu_debuglink( + ctx = ctx, + name = name, + obj = shlib.lib.unstripped_output, + ) + debuginfos[name] = (stripped, debuginfo) + else: + stripped, debuginfo = existing + shlib = SharedLibrary( + soname = shlib.soname, + label = shlib.label, + lib = LinkedObject( + output = stripped, + unstripped_output = shlib.lib.unstripped_output, + dwp = shlib.lib.dwp, + ), + ) + stripped_shlibs.append((libdir, shlib, preload)) + debuginfo_files.append(((libdir, shlib, ".debuginfo"), debuginfo)) + shared_libs = stripped_shlibs + for name, (extension, label) in extensions.items(): + stripped, debuginfo = strip_debug_with_gnu_debuglink( + ctx = ctx, + name = name, + obj = extension.unstripped_output, + ) + extensions[name] = ( + LinkedObject( + output = stripped, + unstripped_output = extension.unstripped_output, + dwp = extension.dwp, + ), + label, + ) + 
debuginfo_files.append((name + ".debuginfo", debuginfo)) # Combine sources and extensions into a map of all modules. pex_modules = PexModules( manifests = library.manifests(), extra_manifests = extra_manifests, - debuginfo_manifest = create_manifest_for_source_map(ctx, "debuginfo", debuginfo_artifacts) if debuginfo_artifacts else None, compile = compile, extensions = create_manifest_for_extensions( ctx, @@ -706,16 +772,17 @@ def _convert_python_library_to_executable( # Build the PEX. pex = make_py_package( - ctx, - python_toolchain, - ctx.attrs.make_py_package[RunInfo] if ctx.attrs.make_py_package != None else None, - package_style, - ctx.attrs.build_args, - pex_modules, - shared_libraries, - main, - hidden_resources, - allow_cache_upload, + ctx = ctx, + python_toolchain = python_toolchain, + make_py_package_cmd = ctx.attrs.make_py_package[RunInfo] if ctx.attrs.make_py_package != None else None, + package_style = package_style, + build_args = ctx.attrs.build_args, + pex_modules = pex_modules, + shared_libraries = shared_libs, + main = main, + hidden_resources = hidden_resources, + allow_cache_upload = allow_cache_upload, + debuginfo_files = debuginfo_files, ) pex.sub_targets.update(extra) @@ -757,16 +824,17 @@ def python_binary_impl(ctx: AnalysisContext) -> list[Provider]: if ctx.attrs.main != None: srcs[ctx.attrs.main.short_path] = ctx.attrs.main srcs = qualify_srcs(ctx.label, ctx.attrs.base_module, srcs) + resources = qualify_srcs(ctx.label, ctx.attrs.base_module, py_attr_resources(ctx)) pex = python_executable( ctx, main, srcs, - {}, + resources, compile = value_or(ctx.attrs.compile, False), allow_cache_upload = cxx_attrs_get_allow_cache_upload(ctx.attrs), ) return [ make_default_info(pex), - RunInfo(pex.run_cmd), + make_run_info(pex, ctx.attrs.run_with_inplace), ] diff --git a/prelude/python/python_library.bzl b/prelude/python/python_library.bzl index 16ff1ff412b..dc86ef03c83 100644 --- a/prelude/python/python_library.bzl +++ b/prelude/python/python_library.bzl @@ -17,7 +17,6 @@ load( "gather_resources", ) load("@prelude//cxx:cxx_link_utility.bzl", "shared_libs_symlink_tree_name") -load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxPlatformInfo") load( "@prelude//cxx:omnibus.bzl", "get_excluded", @@ -35,6 +34,14 @@ load( ) load("@prelude//linking:shared_libraries.bzl", "SharedLibraryInfo", "merge_shared_libraries") load("@prelude//python:toolchain.bzl", "PythonPlatformInfo", "get_platform_attr") +load( + "@prelude//third-party:build.bzl", + "create_third_party_build_root", + "prefix_from_label", + "project_from_label", +) +load("@prelude//third-party:providers.bzl", "ThirdPartyBuild", "third_party_build_info") +load("@prelude//unix:providers.bzl", "UnixEnv", "create_unix_env_info") load("@prelude//utils:arglike.bzl", "ArgLike") # @unused Used as a type load("@prelude//utils:expect.bzl", "expect") load("@prelude//utils:utils.bzl", "flatten", "from_named_set") @@ -195,19 +202,22 @@ def _exclude_deps_from_omnibus( def _attr_srcs(ctx: AnalysisContext) -> dict[str, Artifact]: python_platform = ctx.attrs._python_toolchain[PythonPlatformInfo] - cxx_platform = ctx.attrs._cxx_toolchain[CxxPlatformInfo] + cxx_toolchain = ctx.attrs._cxx_toolchain all_srcs = {} all_srcs.update(from_named_set(ctx.attrs.srcs)) - for srcs in get_platform_attr(python_platform, cxx_platform, ctx.attrs.platform_srcs): + for srcs in get_platform_attr(python_platform, cxx_toolchain, ctx.attrs.platform_srcs): all_srcs.update(from_named_set(srcs)) return all_srcs def _attr_resources(ctx: AnalysisContext) -> dict[str, 
Artifact | Dependency]: python_platform = ctx.attrs._python_toolchain[PythonPlatformInfo] - cxx_platform = ctx.attrs._cxx_toolchain[CxxPlatformInfo] + cxx_toolchain = ctx.attrs._cxx_toolchain all_resources = {} all_resources.update(from_named_set(ctx.attrs.resources)) - for resources in get_platform_attr(python_platform, cxx_platform, ctx.attrs.platform_resources): + + # `python_binary` doesn't have `platform_resources` + platform_resources = getattr(ctx.attrs, "platform_resources", []) + for resources in get_platform_attr(python_platform, cxx_toolchain, platform_resources): all_resources.update(from_named_set(resources)) return all_resources @@ -262,7 +272,7 @@ def python_library_impl(ctx: AnalysisContext) -> list[Provider]: expect(not ctx.attrs.versioned_resources) python_platform = ctx.attrs._python_toolchain[PythonPlatformInfo] - cxx_platform = ctx.attrs._cxx_toolchain[CxxPlatformInfo] + cxx_toolchain = ctx.attrs._cxx_toolchain providers = [] sub_targets = {} @@ -290,15 +300,16 @@ def python_library_impl(ctx: AnalysisContext) -> list[Provider]: raw_deps = ctx.attrs.deps raw_deps.extend(flatten( - get_platform_attr(python_platform, cxx_platform, ctx.attrs.platform_deps), + get_platform_attr(python_platform, cxx_toolchain, ctx.attrs.platform_deps), )) + resource_manifest = py_resources(ctx, resources) if resources else None deps, shared_libraries = gather_dep_libraries(raw_deps) library_info = create_python_library_info( ctx.actions, ctx.label, srcs = src_manifest, src_types = src_type_manifest, - resources = py_resources(ctx, resources) if resources else None, + resources = resource_manifest, bytecode = bytecode, dep_manifest = dep_manifest, deps = deps, @@ -306,6 +317,49 @@ def python_library_impl(ctx: AnalysisContext) -> list[Provider]: ) providers.append(library_info) + providers.append( + create_unix_env_info( + actions = ctx.actions, + env = UnixEnv( + label = ctx.label, + python_libs = [library_info], + ), + deps = raw_deps, + ), + ) + + # Allow third-party-build rules to depend on Python rules. + tp_project = project_from_label(ctx.label) + tp_prefix = prefix_from_label(ctx.label) + providers.append( + third_party_build_info( + actions = ctx.actions, + build = ThirdPartyBuild( + # TODO(agallagher): Figure out a way to get a unique name? + project = tp_project, + prefix = tp_prefix, + root = create_third_party_build_root( + ctx = ctx, + # TODO(agallagher): use constraints to get py version. + manifests = ( + [("lib/python", src_manifest)] if src_manifest != None else [] + ) + ( + [("lib/python", resource_manifest[0])] if resource_manifest != None else [] + ), + ), + manifest = ctx.actions.write_json( + "third_party_build_manifest.json", + dict( + project = tp_project, + prefix = tp_prefix, + py_lib_paths = ["lib/python"], + ), + ), + ), + deps = raw_deps, + ), + ) + providers.append(create_python_needed_coverage_info(ctx.label, ctx.attrs.base_module, srcs.keys())) # Source DBs. 
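(Aside: both python_library_impl above and prebuilt_python_library_impl earlier in this diff write a small third_party_build_manifest.json with the same three keys. A sketch of its shape with illustrative values; the real strings are derived via prefix_from_label/project_from_label.)

    # Hypothetical contents, shown as the dict passed to ctx.actions.write_json:
    manifest = {
        "prefix": "some/cell/path",   # prefix_from_label(ctx.label)
        "project": "some_project",    # project_from_label(ctx.label)
        "py_lib_paths": ["lib/python"],
    }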
diff --git a/prelude/python/python_test.bzl b/prelude/python/python_test.bzl index bd55642c62e..1c00caa09ce 100644 --- a/prelude/python/python_test.bzl +++ b/prelude/python/python_test.bzl @@ -14,6 +14,11 @@ load("@prelude//utils:utils.bzl", "from_named_set", "value_or") load("@prelude//test/inject_test_run_info.bzl", "inject_test_run_info") load(":interface.bzl", "EntryPointKind") load(":make_py_package.bzl", "PexProviders", "make_default_info") +load( + ":manifest.bzl", + "get_srcs_from_manifest", +) +load(":python.bzl", "PythonLibraryInfo") load(":python_binary.bzl", "python_executable") load(":python_library.bzl", "py_attr_resources", "qualify_srcs") @@ -38,8 +43,10 @@ def python_test_executable(ctx: AnalysisContext) -> PexProviders: main_module = value_or(ctx.attrs.main_module, "__test_main__") srcs = qualify_srcs(ctx.label, ctx.attrs.base_module, from_named_set(ctx.attrs.srcs)) + if ctx.attrs.implicit_test_library != None: + top_level_manifest = list(ctx.attrs.implicit_test_library[PythonLibraryInfo].manifests.traverse(ordering = "preorder"))[0] + srcs.update(qualify_srcs(ctx.label, ctx.attrs.base_module, from_named_set(get_srcs_from_manifest(top_level_manifest.srcs)))) - # Generate the test modules file and add it to sources. test_modules_name, test_modules_path = _write_test_modules_list(ctx, srcs) srcs[test_modules_name] = test_modules_path diff --git a/prelude/python/python_wheel.bzl b/prelude/python/python_wheel.bzl new file mode 100644 index 00000000000..f94dc637176 --- /dev/null +++ b/prelude/python/python_wheel.bzl @@ -0,0 +1,233 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load("@prelude//:paths.bzl", "paths") +load("@prelude//cxx:cxx_context.bzl", "get_cxx_toolchain_info") +load("@prelude//cxx:cxx_toolchain_types.bzl", "PicBehavior") +load( + "@prelude//cxx:link.bzl", + "cxx_link_shared_library", +) +load( + "@prelude//cxx:link_types.bzl", + "link_options", +) +load("@prelude//linking:execution_preference.bzl", "LinkExecutionPreference") +load( + "@prelude//linking:link_info.bzl", + "LinkArgs", + "LinkStrategy", + "get_lib_output_style", + "get_link_info", +) +load( + "@prelude//linking:linkable_graph.bzl", + "LinkableGraph", + "LinkableNode", # @unused Used as a type + "LinkableRootInfo", + "get_deps_for_link", + "get_linkable_graph_node_map_func", + get_link_info_for_node = "get_link_info", +) +load("@prelude//python:manifest.bzl", "create_manifest_for_entries") +load("@prelude//python:python.bzl", "PythonLibraryInfo") +load("@prelude//python:toolchain.bzl", "PythonToolchainInfo") +load("@prelude//utils:expect.bzl", "expect") +load( + "@prelude//utils:graph_utils.bzl", + "depth_first_traversal_by", +) +load("@prelude//decls/toolchains_common.bzl", "toolchains_common") +load("@prelude//transitions/constraint_overrides.bzl", "constraint_overrides_transition") + +def _link_deps( + link_infos: dict[Label, LinkableNode], + deps: list[Label], + link_strategy: LinkStrategy, + pic_behavior: PicBehavior) -> list[Label]: + """ + Return transitive deps required to link dynamically against the given deps. + This will follow through deps of statically linked inputs and exported + deps of everything else (see https://fburl.com/diffusion/rartsbkw from v1).
+ """ + + def find_deps(node: Label): + return get_deps_for_link(link_infos[node], link_strategy, pic_behavior) + + return depth_first_traversal_by(link_infos, deps, find_deps) + +def _impl(ctx: AnalysisContext) -> list[Provider]: + providers = [] + + cmd = [] + hidden = [] + + cmd.append(ctx.attrs._wheel[RunInfo]) + + name_parts = [ + ctx.attrs.dist or ctx.attrs.name, + ctx.attrs.version, + ctx.attrs.python, + ctx.attrs.abi, + ctx.attrs.platform, + ] + wheel = ctx.actions.declare_output("{}.whl".format("-".join(name_parts))) + cmd.append(cmd_args(wheel.as_output(), format = "--output={}")) + + cmd.append("--name={}".format(ctx.attrs.dist or ctx.attrs.name)) + cmd.append("--version={}".format(ctx.attrs.version)) + + if ctx.attrs.entry_points: + cmd.append("--entry-points={}".format(json.encode(ctx.attrs.entry_points))) + + for key, val in ctx.attrs.extra_metadata.items(): + cmd.extend(["--metadata", key, val]) + + cmd.extend(["--metadata", "Requires-Python", "=={}.*".format(ctx.attrs.python[2:])]) + + for requires in ctx.attrs.requires: + cmd.extend(["--metadata", "Requires-Dist", requires]) + + for name, script in ctx.attrs.scripts.items(): + cmd.extend(["--data", paths.join("scripts", name), script]) + + libraries = {} + for lib in ctx.attrs.libraries: + libraries[lib.label] = lib + if ctx.attrs.libraries_query != None: + for lib in ctx.attrs.libraries_query: + if PythonLibraryInfo in lib: + libraries[lib.label] = lib + + srcs = [] + extensions = {} + for dep in libraries.values(): + manifests = dep[PythonLibraryInfo].manifests.value + if manifests.srcs != None: + srcs.append(manifests.srcs) + if manifests.resources != None: + expect(not manifests.resources[1]) + srcs.append(manifests.resources[0]) + if manifests.extensions != None: + python_toolchain = ctx.attrs._python_toolchain[PythonToolchainInfo] + toolchain_info = get_cxx_toolchain_info(ctx) + items = manifests.extensions.items() + expect(len(items) == 1) + extension = items[0][0] + root = dep[LinkableRootInfo] + + # Add link inputs for the linkable root and any deps. 
+ inputs = [] + inputs.append(get_link_info( + infos = root.link_infos, + prefer_stripped = ctx.attrs.prefer_stripped_objects, + )) + link_infos = get_linkable_graph_node_map_func(dep[LinkableGraph])() + for ext_dep in _link_deps( + link_infos, + root.deps, + LinkStrategy("static_pic"), + toolchain_info.pic_behavior, + ): + node = link_infos[ext_dep] + output_style = get_lib_output_style( + LinkStrategy("static_pic"), + node.preferred_linkage, + toolchain_info.pic_behavior, + ) + inputs.append(get_link_info_for_node( + node, + output_style, + prefer_stripped = ctx.attrs.prefer_stripped_objects, + )) + + # link the rule + link_result = cxx_link_shared_library( + ctx = ctx, + output = extension, + opts = link_options( + links = [ + LinkArgs(flags = python_toolchain.extension_linker_flags), + LinkArgs(flags = python_toolchain.wheel_linker_flags), + LinkArgs(infos = inputs), + ], + category_suffix = "native_extension", + identifier = extension, + link_execution_preference = LinkExecutionPreference("any"), + ), + ) + extensions[extension] = link_result.linked_object + + if extensions: + srcs.append( + create_manifest_for_entries( + ctx, + name = "extensions.txt", + entries = [ + (name, extension.output, "") + for name, extension in extensions.items() + ], + ), + ) + + for manifest in srcs: + cmd.append(cmd_args(manifest.manifest, format = "--srcs={}")) + for a, _ in manifest.artifacts: + hidden.append(a) + + ctx.actions.run(cmd_args(cmd, hidden = hidden), category = "wheel") + providers.append(DefaultInfo(default_output = wheel)) + + return providers + +python_wheel = rule( + impl = _impl, + cfg = constraint_overrides_transition, + attrs = dict( + dist = attrs.option(attrs.string(), default = None), + version = attrs.string(default = "1.0.0"), + python = attrs.string( + default = select({ + "ovr_config//third-party/python/constraints:3.10": "py3.10", + "ovr_config//third-party/python/constraints:3.11": "py3.11", + "ovr_config//third-party/python/constraints:3.12": "py3.12", + "ovr_config//third-party/python/constraints:3.8": "py3.8", + "ovr_config//third-party/python/constraints:3.9": "py3.9", + }), + ), + entry_points = attrs.dict( + key = attrs.string(), + value = attrs.dict( + key = attrs.string(), + value = attrs.string(), + ), + default = {}, + ), + requires = attrs.list(attrs.string(), default = []), + extra_metadata = attrs.dict( + key = attrs.string(), + value = attrs.string(), + default = {}, + ), + abi = attrs.string(default = "none"), + platform = attrs.string( + default = select({ + "DEFAULT": "any", + "ovr_config//os:linux-arm64": "linux_aarch64", + "ovr_config//os:linux-x86_64": "linux_x86_64", + }), + ), + constraint_overrides = attrs.list(attrs.string(), default = []), + libraries = attrs.list(attrs.dep(providers = [PythonLibraryInfo]), default = []), + scripts = attrs.dict(key = attrs.string(), value = attrs.source(), default = {}), + libraries_query = attrs.option(attrs.query(), default = None), + prefer_stripped_objects = attrs.default_only(attrs.bool(default = False)), + _wheel = attrs.default_only(attrs.exec_dep(default = "prelude//python/tools:wheel")), + _cxx_toolchain = toolchains_common.cxx(), + _python_toolchain = toolchains_common.python(), + ), +) diff --git a/prelude/python/runtime/BUCK.v2 b/prelude/python/runtime/BUCK.v2 index 1cac267a37f..dbf6fa6b73f 100644 --- a/prelude/python/runtime/BUCK.v2 +++ b/prelude/python/runtime/BUCK.v2 @@ -1,3 +1,9 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + filegroup( 
name = "bootstrap_files", srcs = glob(["__par__/**/*.py"]), diff --git a/prelude/python/source_db.bzl b/prelude/python/source_db.bzl index c799e576de4..4d299a03d05 100644 --- a/prelude/python/source_db.bzl +++ b/prelude/python/source_db.bzl @@ -6,6 +6,7 @@ # of this source tree. load("@prelude//python:python.bzl", "PythonLibraryInfo") +load("@prelude//utils:argfile.bzl", "at_argfile") load( ":manifest.bzl", "ManifestInfo", # @unused Used as a type @@ -41,10 +42,12 @@ def create_source_db( dep_manifests = ctx.actions.tset(PythonLibraryManifestsTSet, children = [d.manifests for d in python_deps]) dependencies = cmd_args(dep_manifests.project_as_args("source_type_manifests"), format = "--dependency={}") - dependencies_file = ctx.actions.write("source_db_dependencies", dependencies) - dependencies_file = cmd_args(dependencies_file, format = "@{}").hidden(dependencies) + cmd.add(at_argfile( + actions = ctx.actions, + name = "source_db_dependencies", + args = dependencies, + )) - cmd.add(dependencies_file) artifacts.append(dep_manifests.project_as_args("source_type_artifacts")) ctx.actions.run(cmd, category = "py_source_db") @@ -71,9 +74,12 @@ def create_dbg_source_db( dep_manifests = ctx.actions.tset(PythonLibraryManifestsTSet, children = [d.manifests for d in python_deps]) dependencies = cmd_args(dep_manifests.project_as_args("source_manifests"), format = "--dependency={}") - dependencies_file = ctx.actions.write("dbg_source_db_dependencies", dependencies) - dependencies_file = cmd_args(dependencies_file, format = "@{}").hidden(dependencies) - cmd.add(dependencies_file) + cmd.add(at_argfile( + actions = ctx.actions, + name = "dbg_source_db_dependencies", + args = dependencies, + )) + artifacts.append(dep_manifests.project_as_args("source_artifacts")) ctx.actions.run(cmd, category = "py_dbg_source_db") diff --git a/prelude/python/sourcedb/build.bxl b/prelude/python/sourcedb/build.bxl index 13cbf9c8550..81ded5ae7ff 100644 --- a/prelude/python/sourcedb/build.bxl +++ b/prelude/python/sourcedb/build.bxl @@ -12,22 +12,34 @@ def _get_artifact(result: bxl.BuildResult) -> Artifact: return artifact fail("Sourcedb rule must have at least one artifact") +def _get_sourcedb(result: list[bxl.EnsuredArtifact]) -> bxl.EnsuredArtifact: + # NOTE: the first artifact is always the source db json + # T124989384 will make this nicer + for artifact in result: + return artifact + fail("Sourcedb rule must have at least one artifact") + def _abort_on_build_failure(target_label: TargetLabel, result: bxl.BuildResult) -> None: for failure in result.failures(): error_message = "Target `{}` cannot be built by Buck\nreason: {}".format(target_label, failure) fail(error_message) -# Build sourcedb for the given targets, and return a mapping from target names -# to the corresponding sourcedb JSON file location. -def do_build( +def _build( ctx: bxl.Context, - targets: list[ConfiguredTargetLabel]) -> dict[TargetLabel, Artifact]: + targets: list[ConfiguredTargetLabel]) -> dict[Label, bxl.BuildResult]: # Build sourcedbs of all targets configured_sub_targets = [ target.with_sub_target(["source-db-no-deps"]) for target in targets ] - build_results = ctx.build(configured_sub_targets) + return ctx.build(configured_sub_targets) + +# Build sourcedb for the given targets, and return a mapping from target names +# to the corresponding sourcedb JSON file location. 
+def do_build( + ctx: bxl.Context, + targets: list[ConfiguredTargetLabel]) -> dict[TargetLabel, Artifact]: + build_results = _build(ctx, targets) # Compute result dict output = {} @@ -37,3 +49,14 @@ def do_build( path = _get_artifact(result) output[raw_target] = path return output + +# Same as do_build, except calls ensure to ensure artifacts are materialized. +def do_build_ensured( + ctx: bxl.Context, + targets: list[ConfiguredTargetLabel]) -> dict[TargetLabel, bxl.EnsuredArtifact]: + build_results = _build(ctx, targets) + for key, value in build_results.items(): + _abort_on_build_failure(key.raw_target(), value) + + ensured_artifacts = ctx.output.ensure_multiple(build_results) + return {label.raw_target(): _get_sourcedb(artifact) for label, artifact in ensured_artifacts.items()} diff --git a/prelude/python/sourcedb/filter.bxl b/prelude/python/sourcedb/filter.bxl index 849d8db228d..f2ad4f87ec7 100644 --- a/prelude/python/sourcedb/filter.bxl +++ b/prelude/python/sourcedb/filter.bxl @@ -14,7 +14,7 @@ BUCK_PYTHON_RULE_KIND_QUERY = "|".join(BUCK_PYTHON_RULE_KINDS) def filter_root_targets( query: bxl.UqueryContext, - target_patterns: typing.Any) -> bxl.TargetSet: + target_patterns: typing.Any) -> bxl.UnconfiguredTargetSet: # Find all Pure-Python targets candidate_targets = utarget_set() for pattern in target_patterns: diff --git a/prelude/python/sourcedb/ide.bxl b/prelude/python/sourcedb/ide.bxl new file mode 100644 index 00000000000..af0661f92d5 --- /dev/null +++ b/prelude/python/sourcedb/ide.bxl @@ -0,0 +1,51 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load(":build.bxl", "do_build_ensured") +load(":query.bxl", "do_query") + +def _build_entry_point(ctx: bxl.Context) -> None: + bxl_actions = ctx.bxl_actions( + target_platform = "prelude//platforms:default", + ) + actions = bxl_actions.actions + + query = ctx.uquery() + + output = {} + for source in ctx.cli_args.source: + owning_targets = query.owner(source) + targets_configured = do_query(ctx, query, actions, owning_targets) + targets_configured += [target.label for target in ctx.configured_targets(ctx.cli_args.extra_source_targets)] + + sourcedbs = do_build_ensured(ctx, targets_configured) + + output[source] = {"db": sourcedbs, "owning_targets": [target.label for target in owning_targets]} + + ctx.output.print_json(output) + +build = bxl_main( + doc = """Build Python sourcedb for Python IDE support. + + It takes a list of file paths, and will find the owner targets for all + those files and build source-db for those owning targets, returning them all. 
+ """, + impl = _build_entry_point, + cli_args = { + "extra-source-targets": cli_args.list( + cli_args.string( + doc = "fully qualified targets to include in the sourcedb", + ), + [], # default value + ), + "source": cli_args.list( + cli_args.string( + doc = "File to build a source db for (relative to source root)", + ), + ), + }, +) diff --git a/prelude/python/sourcedb/merge.bxl b/prelude/python/sourcedb/merge.bxl index af6219f45cd..525bdf89280 100644 --- a/prelude/python/sourcedb/merge.bxl +++ b/prelude/python/sourcedb/merge.bxl @@ -17,13 +17,14 @@ def do_merge( dependency_key = bxl_actions.exec_deps.keys()[0] - command = cmd_args(bxl_actions.exec_deps[dependency_key][RunInfo]) - command.add(merger_input) - command.add("--output") - command.add(merger_output.as_output()) - - # Declare that the merger result depends on all sourcedbs - command.hidden(built_sourcedbs.values()) + command = cmd_args( + bxl_actions.exec_deps[dependency_key][RunInfo], + merger_input, + "--output", + merger_output.as_output(), + # Declare that the merger result depends on all sourcedbs + hidden = built_sourcedbs.values(), + ) actions.run(command, category = command_category) return ctx.output.ensure(merger_output) diff --git a/prelude/python/sourcedb/owners.bxl b/prelude/python/sourcedb/owners.bxl new file mode 100644 index 00000000000..deeefbf7e0a --- /dev/null +++ b/prelude/python/sourcedb/owners.bxl @@ -0,0 +1,41 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load(":filter.bxl", "filter_root_targets") + +def _owners_entry_point(ctx: bxl.Context) -> None: + query = ctx.uquery() + root = ctx.root() + + owning_targets = filter_root_targets(query, query.owner(ctx.cli_args.source)) + + files = [] + if len(owning_targets) > 0: + target = owning_targets[0] + files = query.inputs(target) + cell_root = ctx.audit().cell([target.label.cell])[target.label.cell] + files = ["{}/{}".format(cell_root, file.path) for file in files] + + ctx.output.print_json({"files": files, "owning_targets": [target.label for target in owning_targets], "root": root}) + +build = bxl_main( + doc = """Determines owning python targets and root, providing files within the first owning target. + Note: must be run from within fbsource. + + It takes a file path, returning an object of format + `{'owning_targets': List, 'root': string, 'files': List}` + - Owning targets is the list of python target labels that own the file. + - Root is the buck project root. + - Files is the list of files (absolute paths) within the first owning target, if any. 
+ """, + impl = _owners_entry_point, + cli_args = { + "source": cli_args.string( + doc = "Source file (absolute path)", + ), + }, +) diff --git a/prelude/python/sourcedb/query.bxl b/prelude/python/sourcedb/query.bxl index 26d4b51ac68..bc3a8c4aa6c 100644 --- a/prelude/python/sourcedb/query.bxl +++ b/prelude/python/sourcedb/query.bxl @@ -21,7 +21,7 @@ def _get_python_library_manifests_from_analysis_result( def _get_python_library_manifests_from_targets( ctx: bxl.Context, - targets: bxl.TargetSet) -> list[PythonLibraryManifestsTSet]: + targets: bxl.UnconfiguredTargetSet) -> list[PythonLibraryManifestsTSet]: return filter(None, [ _get_python_library_manifests_from_analysis_result(analysis_result) for analysis_result in ctx.analysis(targets).values() @@ -30,7 +30,7 @@ def _get_python_library_manifests_from_targets( def get_python_library_manifests_tset_from_targets( ctx: bxl.Context, actions: AnalysisActions, - root_targets: bxl.TargetSet) -> PythonLibraryManifestsTSet: + root_targets: bxl.UnconfiguredTargetSet) -> PythonLibraryManifestsTSet: return actions.tset( PythonLibraryManifestsTSet, children = _get_python_library_manifests_from_targets(ctx, root_targets), diff --git a/prelude/python/sourcedb/typing_query.bxl b/prelude/python/sourcedb/typing_query.bxl new file mode 100644 index 00000000000..a5eb7845f46 --- /dev/null +++ b/prelude/python/sourcedb/typing_query.bxl @@ -0,0 +1,55 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load("@prelude//python/sourcedb/filter.bxl", "BUCK_PYTHON_RULE_KIND_QUERY") + +def get_owners_for_files( + query: bxl.UqueryContext, + sources: list[str]) -> dict[str, bxl.UnconfiguredTargetSet]: + return {source: query.owner(source) for source in sources} + +def has_any_python_targets_with_typing( + query: bxl.UqueryContext, + owners: bxl.UnconfiguredTargetSet) -> bool: + targets_with_typing = query.attrfilter("typing", "True", owners) + + python_targets_with_typing = query.kind( + BUCK_PYTHON_RULE_KIND_QUERY, + targets_with_typing, + ) + + return len(python_targets_with_typing) != 0 + +def get_files_per_target_typed( + query: bxl.UqueryContext, + sources: list[str]) -> dict[str, bool]: + files_to_owners = get_owners_for_files(query, sources) + + return { + file: has_any_python_targets_with_typing(query, owners) + for file, owners in files_to_owners.items() + } + +def _do_typing_query_entry_point(ctx: bxl.Context) -> None: + query = ctx.uquery() + files_per_target_typed = get_files_per_target_typed(query, ctx.cli_args.source) + ctx.output.print_json(files_per_target_typed) + +typing_query = bxl_main( + doc = ( + "Queries Buck about a given file to determine if any owning targets have typing " + + "in their attributes." + ), + impl = _do_typing_query_entry_point, + cli_args = { + "source": cli_args.list( + cli_args.string( + doc = "The absolute path to a file you are trying to get typing attributes of", + ), + ), + }, +) diff --git a/prelude/python/toolchain.bzl b/prelude/python/toolchain.bzl index 62bd8be3629..3a0e4260b0c 100644 --- a/prelude/python/toolchain.bzl +++ b/prelude/python/toolchain.bzl @@ -50,17 +50,21 @@ PythonToolchainInfo = provider( "default_sitecustomize": provider_field(typing.Any, default = None), # The interpreter to use to compile bytecode. 
"host_interpreter": provider_field(typing.Any, default = None), + "bundled_interpreter": provider_field(typing.Any, default = None), "interpreter": provider_field(typing.Any, default = None), "version": provider_field(typing.Any, default = None), "native_link_strategy": provider_field(typing.Any, default = None), "linker_flags": provider_field(typing.Any, default = None), "binary_linker_flags": provider_field(typing.Any, default = None), + "extension_linker_flags": provider_field(typing.Any, default = None), + "wheel_linker_flags": provider_field(list[typing.Any], default = []), "generate_static_extension_info": provider_field(typing.Any, default = None), "parse_imports": provider_field(typing.Any, default = None), "traverse_dep_manifest": provider_field(typing.Any, default = None), "package_style": provider_field(typing.Any, default = None), "strip_libpar": provider_field(typing.Any, default = None), "make_source_db": provider_field(typing.Any, default = None), + "native_library_runtime_paths": provider_field(list[str], default = []), "make_source_db_no_deps": provider_field(typing.Any, default = None), "make_py_package_inplace": provider_field(typing.Any, default = None), "make_py_package_standalone": provider_field(typing.Any, default = None), @@ -93,14 +97,19 @@ def get_package_style(ctx: AnalysisContext) -> PackageStyle: def get_platform_attr( python_platform_info: PythonPlatformInfo, - cxx_platform_info: CxxPlatformInfo, + cxx_toolchain: Dependency, xs: list[(str, typing.Any)]) -> list[typing.Any]: """ Take a platform_* value, and the non-platform version, and concat into a list of values based on the cxx/python platform """ + if len(xs) == 0: + return [] + cxx_info = cxx_toolchain.get(CxxPlatformInfo) + if cxx_info == None: + fail("Cannot use platform attrs in a fat platform configuration") python_platform = python_platform_info.name - cxx_platform = cxx_platform_info.name + cxx_platform = cxx_info.name return by_platform([python_platform, cxx_platform], xs) python = struct( diff --git a/prelude/python/tools/BUCK.v2 b/prelude/python/tools/BUCK.v2 index 1e325372f54..6463b5c0181 100644 --- a/prelude/python/tools/BUCK.v2 +++ b/prelude/python/tools/BUCK.v2 @@ -1,3 +1,9 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + prelude = native prelude.python_bootstrap_binary( @@ -126,3 +132,9 @@ prelude.python_bootstrap_binary( main = "fail_with_message.py", visibility = ["PUBLIC"], ) + +prelude.python_bootstrap_binary( + name = "wheel", + main = "wheel.py", + visibility = ["PUBLIC"], +) diff --git a/prelude/python/tools/__test_main__.py b/prelude/python/tools/__test_main__.py index d699a7e9cc5..b8c3cb6c7df 100644 --- a/prelude/python/tools/__test_main__.py +++ b/prelude/python/tools/__test_main__.py @@ -32,7 +32,6 @@ import time import traceback import unittest -import warnings from importlib.machinery import PathFinder @@ -117,7 +116,8 @@ class DebugWipeLoader(SourceFileLoader): def get_code(self, fullname): code = super().get_code(fullname) - if code: + # This can segfault in 3.12 + if code and sys.version_info < (3, 12): # Ideally we'd do # code.co_lnotab = b'' # But code objects are READONLY. Not to worry though; we'll @@ -258,7 +258,14 @@ def stopTest(self, test): # test cases, and fall back to looking the test up from the suite # otherwise. 
if not hasattr(test, "_testMethodName"): - test = self._find_next_test(self._suite) + potential_test = self._find_next_test(self._suite) + + if potential_test is not None: + test = potential_test + elif hasattr(test, "id"): + # If the next test can't be found, this could be a failure in class teardown. Fallback + # to using the id, which will likely be the method name as the test method. + test._testMethodName = test.id() self._results.append( { @@ -428,6 +435,30 @@ def getTestCaseNames(self, testCaseClass): matched.append(attrname) return matched + def loadTestsFromName(self, name, module=None): + """ + Tries to find and import the module from `name` and discover test cases inside. + + NOTE: this function is used by the unittest framework and our unittest + adapters to integrate with buck/tpx. + """ + suite = super().loadTestsFromName(name, module) + for test in suite: + if isinstance(test, unittest.loader._FailedTest): + # _FailedTest means that the test module couldn't be loaded + # (usually, because of a bad import). Instead of pretending to + # execute a synthetic test case + # `unittest.loader._FailedTest()` and reporting + # it to the downstream consumers, we should hard fail. + # When static listing is used this will let TPX to associate the + # failure to either the main test (for bundled execution) or + # individual test cases (regular execution) in a test target, + # and not to the synthetic _FailedTest case. + print(test._exception, file=sys.stderr) + sys.exit(1) + + return suite + class Loader: def __init__(self, modules, regex=None): @@ -657,11 +688,20 @@ def run(self): if self.options.list: for test in self.get_tests(test_suite): + # Python 3.12 changed the implementation of `TestCase.__str__`. + # We construct the name manually here to ensure consistency between + # Python versions. + # Example: "test_basic (tests.test_object.TestAbsent)". + method_name = getattr(test, "_testMethodName", "") + cls = test.__class__ if self.options.list_format == "python": - name = str(test) + if method_name: + name = f"{method_name} ({cls.__module__}.{cls.__qualname__})" + else: + name = str(test) + elif self.options.list_format == "buck": - method_name = getattr(test, "_testMethodName", "") - name = _format_test_name(test.__class__, method_name) + name = _format_test_name(cls, method_name) else: raise Exception( "Bad test list format: %s" % (self.options.list_format,) @@ -757,12 +797,12 @@ def convert_to_diff_cov_str(self, analysis): analysis[3][-1] if len(analysis[3]) else 0, ) lines = ["N"] * numLines - for l in analysis[1]: - lines[l - 1] = "C" - for l in analysis[2]: - lines[l - 1] = "X" - for l in analysis[3]: - lines[l - 1] = "U" + for line in analysis[1]: + lines[line - 1] = "C" + for line in analysis[2]: + lines[line - 1] = "X" + for line in analysis[3]: + lines[line - 1] = "U" return "".join(lines) diff --git a/prelude/python/tools/compile.py b/prelude/python/tools/compile.py index 22016141846..b7d6470f04e 100644 --- a/prelude/python/tools/compile.py +++ b/prelude/python/tools/compile.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
+# pyre-strict + """ Example usage: $ cat inputs.manifest @@ -14,30 +16,29 @@ out-dir/foo.pyc """ -# pyre-unsafe - import argparse import errno import json import os import sys from py_compile import compile, PycInvalidationMode +from typing import List if sys.version_info[0] == 3: import importlib import importlib.util - DEFAULT_FORMAT = importlib.util.cache_from_source("{pkg}/{name}.py") + DEFAULT_FORMAT: str = importlib.util.cache_from_source("{pkg}/{name}.py") else: - DEFAULT_FORMAT = "{pkg}/{name}.pyc" + DEFAULT_FORMAT: str = "{pkg}/{name}.pyc" -def get_py_path(module): +def get_py_path(module: str) -> str: return module.replace(".", os.sep) + ".py" -def get_pyc_path(module, fmt): +def get_pyc_path(module: str, fmt: str) -> str: try: package, name = module.rsplit(".", 1) except ValueError: @@ -51,7 +52,7 @@ def get_pyc_path(module, fmt): return os.path.join(*parts) -def _mkdirs(dirpath): +def _mkdirs(dirpath: str) -> None: try: os.makedirs(dirpath) except OSError as e: @@ -59,7 +60,7 @@ def _mkdirs(dirpath): raise -def main(argv): +def main(argv: List[str]) -> None: parser = argparse.ArgumentParser(fromfile_prefix_chars="@") parser.add_argument("-o", "--output", required=True) parser.add_argument( @@ -69,6 +70,7 @@ def main(argv): parser.add_argument( "--invalidation-mode", type=str, + # pyre-fixme[16]: `int` has no attribute `name`. default=PycInvalidationMode.UNCHECKED_HASH.name, choices=[m.name for m in PycInvalidationMode], ) diff --git a/prelude/python/tools/extract.py b/prelude/python/tools/extract.py index 6b3eef77c61..15b52b07a1f 100755 --- a/prelude/python/tools/extract.py +++ b/prelude/python/tools/extract.py @@ -15,44 +15,159 @@ """ import argparse +import configparser +import glob +import json import os +import shutil import stat +import tarfile +import tempfile import zipfile from pathlib import Path +from typing import Optional + + +def strip_soabi_tag(path: Path) -> Optional[Path]: + """ + Helper to strip any SOABI tag from the given extension path. Returns `None` + if no stripping is performed. + """ + + suffixes = path.suffixes[-2:] + + # SOABI tagged extensions should have two suffixes. + if len(suffixes) != 2: + return None + + # Not an extension. + ext = "" + for ext in (".so", ".pyd"): + if suffixes[1] == ext: + break + else: + return None + + # TODO(agallagher): Is there a better way to detect these tags? + if not (suffixes[0].startswith(".cpython-") or suffixes[0] == ".abi3"): + return None + + return path.with_suffix("").with_suffix(ext) # shutil.unpack_archive calls zipfile.extract which does *not* preserve file attributes # (see https://bugs.python.org/issue15795, https://stackoverflow.com/questions/39296101/python-zipfile-removes-execute-permissions-from-binaries). # # We need to preserve at least the executable bit. -def extract_zip_with_permissions(src: Path, dst_dir: Path) -> None: - z = zipfile.ZipFile(src) - for info in z.infolist(): - outfile = z.extract(info.filename, dst_dir) +def extract(src: Path, dst_dir: Path, strip_soabi_tags: bool = False) -> None: + if src.suffixes[-2:] == [".tar", ".gz"]: + with tempfile.TemporaryDirectory() as tmp_dir: + with tarfile.open(src) as tf: + tf.extractall(tmp_dir) - execute_perms = (info.external_attr >> 16) & ( - stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH - ) - if execute_perms: - st = os.stat(outfile) - new_mode = stat.S_IMODE(st.st_mode | execute_perms) - if new_mode != st.st_mode: - os.chmod(outfile, new_mode) + # We expect the tgz to contain a single top-level dir with all the + # items to unpack. 
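[Editor's note: an illustration of `strip_soabi_tag` above, a sketch that assumes the helper is importable from `extract.py`; the file names are made up.]

from pathlib import Path

from extract import strip_soabi_tag  # assumes extract.py is on sys.path

# A CPython SOABI tag is stripped down to the bare extension suffix:
assert strip_soabi_tag(Path("foo.cpython-310-x86_64-linux-gnu.so")) == Path("foo.so")
# Stable-ABI extensions are handled the same way:
assert strip_soabi_tag(Path("bar.abi3.so")) == Path("bar.so")
# Untagged extensions and non-extensions are left alone (None means "no change"):
assert strip_soabi_tag(Path("baz.so")) is None
assert strip_soabi_tag(Path("data.tar.gz")) is None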
+ (path,) = glob.glob(os.path.join(tmp_dir, "*")) + for ent in os.listdir(path): + fsrc = os.path.join(path, ent) + fdst = Path(os.path.join(dst_dir, ent)) + soabi_less_dst = strip_soabi_tag(fdst) + if soabi_less_dst is not None: + fdst = soabi_less_dst + shutil.move(fsrc, fdst) + + else: + with zipfile.ZipFile(src) as z: + for info in z.infolist(): + outfile = Path(z.extract(info.filename, dst_dir)) + if strip_soabi_tags: + soabi_less_outfile = strip_soabi_tag(outfile) + if soabi_less_outfile is not None: + os.rename(outfile, soabi_less_outfile) + outfile = soabi_less_outfile + execute_perms = (info.external_attr >> 16) & ( + stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH + ) + if execute_perms: + st = os.stat(outfile) + new_mode = stat.S_IMODE(st.st_mode | execute_perms) + if new_mode != st.st_mode: + os.chmod(outfile, new_mode) def main() -> None: parser = argparse.ArgumentParser( - description="Extract .zip files to a directory in a cross platform manner" + description="Extract .zip/.tar.gz archives to a directory in a cross platform manner" ) parser.add_argument( "--output", type=Path, required=True, help="The directory to write to" ) + parser.add_argument("--strip-soabi-tags", action="store_true") + parser.add_argument("--entry-points", type=Path, help="The directory to write entry-point scripts to") + parser.add_argument( + "--cxx-header-dirs", + type=Path, + help="A file to write out inferred C++ include dirs to", + ) + parser.add_argument( + "--entry-points-manifest", type=Path, help="The file to write the entry-points manifest to" + ) parser.add_argument("src", type=Path, help="The archive to extract to --output") args = parser.parse_args() args.output.mkdir(parents=True, exist_ok=True) - extract_zip_with_permissions(args.src, args.output) + extract( + src=args.src, + dst_dir=args.output, + strip_soabi_tags=args.strip_soabi_tags, + ) + + # Infer C++ header dirs. + if args.cxx_header_dirs is not None: + with open(args.cxx_header_dirs, mode="w") as f: + for root, dirs, _files in os.walk(args.output): + root = os.path.relpath(root, args.output) + if "include" in dirs: + print(os.path.normpath(os.path.join(root, "include")), file=f) + + # Extract any "entry points" from the wheel, and generate scripts from them + # (just like `pip install` would do).
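[Editor's note: context for the entry-point handling that follows, a hedged sketch of the `entry_points.txt` format; the package and script names are invented.]

import configparser

# A wheel's *.dist-info/entry_points.txt is INI-style, e.g.:
SAMPLE = """\
[console_scripts]
mytool = mypkg.cli:main
"""

config = configparser.ConfigParser()
config.read_string(SAMPLE)
for name, entry_point in config.items("console_scripts"):
    mod, func = entry_point.split(":")
    # The extractor writes a launcher script named `name` that imports
    # `func` from `mod` and calls it, much like `pip install` does.
    print(name, "->", mod, func)  # mytool -> mypkg.cli main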
+ if args.entry_points is not None: + entry_points = glob.glob( + os.path.join(args.output, "*.dist-info", "entry_points.txt") + ) + os.makedirs(args.entry_points, exist_ok=True) + manifest = [] + if entry_points: + (entry_points,) = entry_points + config = configparser.ConfigParser() + config.read(entry_points) + if config.has_section("console_scripts"): + for name, entry_point in config.items("console_scripts"): + mod, func = entry_point.split(":") + path = os.path.join(args.entry_points, name) + manifest.append( + (name, path, os.path.relpath(entry_points, args.output)) + ) + with open(path, mode="w") as bf: + bf.write( + """\ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +import re +import sys +from {mod} import {func} +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\\.pyw|\\.exe)?$', '', sys.argv[0]) + sys.exit({func}()) +""".format( + mod=mod, func=func + ) + ) + os.chmod(path, 0o777) + with open(args.entry_points_manifest, mode="w") as f: + json.dump(manifest, f) if __name__ == "__main__": diff --git a/prelude/python/tools/make_par/BUCK b/prelude/python/tools/make_par/BUCK index da10d3ec0c7..62d07e0989a 100644 --- a/prelude/python/tools/make_par/BUCK +++ b/prelude/python/tools/make_par/BUCK @@ -3,8 +3,12 @@ # as it is the only `TARGETS` (not `TARGETS.v2`) in the prelude. # Configuring the tools to do it right seemed more dangerous than just having a caveat on this one file. +load("@prelude//utils:source_listing.bzl", "source_listing") + oncall("build_infra") +source_listing() + export_file( name = "__run_lpar_main__.py", src = "__run_lpar_main__.py", diff --git a/prelude/python/tools/make_par/__run_lpar_main__.py b/prelude/python/tools/make_par/__run_lpar_main__.py index 8605c5ef3e7..1a05d95abc2 100644 --- a/prelude/python/tools/make_par/__run_lpar_main__.py +++ b/prelude/python/tools/make_par/__run_lpar_main__.py @@ -8,14 +8,8 @@ # pyre-strict -# -# Put everything inside an __invoke_main() function. -# This way anything we define won't pollute globals(), since runpy -# will propagate our globals() as to the user's main module. -# pyre-fixme[3]: Return type must be annotated. -def __invoke_main(): +def __invoke_main() -> None: import os - import runpy import sys module = os.getenv("FB_PAR_MAIN_MODULE") @@ -24,50 +18,14 @@ def __invoke_main(): sys.argv[0] = os.getenv("FB_LPAR_INVOKED_NAME", sys.argv[0]) del sys.path[0] - main_runner_module = os.getenv("FB_PAR_MAIN_RUNNER_MODULE") - main_runner_function = os.getenv("FB_PAR_MAIN_RUNNER_FUNCTION") - - if main_runner_module and main_runner_function: - from importlib import import_module - - mod = import_module(main_runner_module) - run_as_main = getattr(mod, main_runner_function) - run_as_main(module, main_function) - return - - #### BUCK1-ONLY CODE FOLLOWS #### - - # Allow users to decorate the main module. In normal Python invocations - # this can be done by prefixing the arguments with `-m decoratingmodule`. - # It's not that easy for par files. The startup script sets up `sys.path` - # from within the Python interpreter. Enable decorating the main module - # after `sys.path` has been setup by setting the PAR_MAIN_OVERRIDE - # environment variable. - decorate_main_module = os.environ.pop("PAR_MAIN_OVERRIDE", None) - if decorate_main_module: - # Pass the original main module as environment variable for the process. - # Allowing the decorating module to pick it up. - # pyre-fixme[6]: For 2nd argument expected `str` but got `Optional[str]`. 
- os.environ["PAR_MAIN_ORIGINAL"] = module - module = decorate_main_module - - if main_function: - assert module - from importlib import import_module - - mod = import_module(module) - main = getattr(mod, main_function) - # This is normally done by `runpy._run_module_as_main`, and is - # important to make multiprocessing work - sys.modules["__main__"] = mod - main() - return + main_runner_module = os.environ["FB_PAR_MAIN_RUNNER_MODULE"] + main_runner_function = os.environ["FB_PAR_MAIN_RUNNER_FUNCTION"] - del os - del sys + from importlib import import_module - # pyre-fixme[16]: Module `runpy` has no attribute `_run_module_as_main`. - runpy._run_module_as_main(module, False) + mod = import_module(main_runner_module) + run_as_main = getattr(mod, main_runner_function) + run_as_main(module, main_function) __invoke_main() diff --git a/prelude/python/tools/make_par/_lpar_bootstrap.sh.template b/prelude/python/tools/make_par/_lpar_bootstrap.sh.template index fc26d742684..2e01a66bde4 100644 --- a/prelude/python/tools/make_par/_lpar_bootstrap.sh.template +++ b/prelude/python/tools/make_par/_lpar_bootstrap.sh.template @@ -19,7 +19,10 @@ export {lib_path_env}={ld_library_path} if [ -n "${{PYTHONPATH+SET}}" ]; then export FB_SAVED_PYTHONPATH=$PYTHONPATH fi -export PYTHONPATH=$BASE_DIR + +# The following expands to ":PAR_APPEND_PYTHONPATH" when $PAR_APPEND_PYTHONPATH is set +# This is important: we don't want a trailing colon in $PYTHONPATH. +export PYTHONPATH=$BASE_DIR${{PAR_APPEND_PYTHONPATH:+:$PAR_APPEND_PYTHONPATH}} if [ -n "${{PYTHONHOME+SET}}" ]; then export FB_SAVED_PYTHONHOME=$PYTHONHOME fi diff --git a/prelude/python/tools/make_par/sitecustomize.py b/prelude/python/tools/make_par/sitecustomize.py index 152a34e1771..31c12b6575f 100644 --- a/prelude/python/tools/make_par/sitecustomize.py +++ b/prelude/python/tools/make_par/sitecustomize.py @@ -10,6 +10,7 @@ from __future__ import annotations +import itertools import multiprocessing.util as mp_util import os import sys @@ -100,8 +101,9 @@ def __startup__() -> None: def __passthrough_exec_module() -> None: # Delegate this module execution to the next module in the path, if any, # effectively making this sitecustomize.py a passthrough module. 
+ paths = itertools.dropwhile(lambda p: not __file__.startswith(p), sys.path) spec = PathFinder.find_spec( - __name__, path=[p for p in sys.path if not __file__.startswith(p)] + __name__, path=[p for p in paths if not __file__.startswith(p)] ) if spec: mod = module_from_spec(spec)
diff --git a/prelude/python/tools/make_py_package_inplace.py b/prelude/python/tools/make_py_package_inplace.py index d966770e9f1..9abccde90d8 100755 --- a/prelude/python/tools/make_py_package_inplace.py +++ b/prelude/python/tools/make_py_package_inplace.py @@ -130,6 +130,13 @@ def parse_args() -> argparse.Namespace: ), help="The dynamic loader env used to find native library deps", ) + parser.add_argument( + "--native-library-runtime-path", + dest="native_library_runtime_paths", + default=[], + action="append", + help="Additional runtime paths used to locate native library deps", + ) parser.add_argument( "-e", "--runtime_env", @@ -139,6 +146,10 @@ def parse_args() -> argparse.Namespace: ) # Compatibility with existing make_par scripts parser.add_argument("--passthrough", action="append", default=[]) + # No-op, added for compatibility with existing make_par scripts + parser.add_argument( + "--omnibus-debug-info", choices=["separate", "strip", "extract"] + ) return parser.parse_args() @@ -157,6 +168,7 @@ def write_bootstrapper(args: argparse.Namespace) -> None: # Because this can be invoked from other directories, find the relative path # from this .par to the modules dir, and use that. relative_modules_dir = os.path.relpath(args.modules_dir, args.output.parent) + native_lib_dirs = [relative_modules_dir] + args.native_library_runtime_paths # TODO(nmj): Remove this hack. So, if arg0 in your shebang is a bash script # (like /usr/local/fbcode/platform007/bin/python3.7 on macs is) @@ -168,9 +180,9 @@ def write_bootstrapper(args: argparse.Namespace) -> None: # exclude it for now, because linux doesn't like multiple args # after /usr/bin/env - ld_preload = "None" + ld_preload = None if args.preload_libraries: - ld_preload = repr(":".join(p.name for p in args.preload_libraries)) + ld_preload = [p.name for p in args.preload_libraries] new_data = data.replace("<PYTHON>", "/usr/bin/env " + str(args.python)) new_data = new_data.replace("", "") @@ -189,9 +201,9 @@ def write_bootstrapper(args: argparse.Namespace) -> None: # Things that are only required for the full template new_data = new_data.replace("<NATIVE_LIBS_ENV_VAR>", args.native_libs_env_var) - new_data = new_data.replace("<NATIVE_LIBS_DIR>", repr(relative_modules_dir)) + new_data = new_data.replace("<NATIVE_LIBS_DIRS>", repr(native_lib_dirs)) new_data = new_data.replace("<NATIVE_LIBS_PRELOAD_ENV_VAR>", "LD_PRELOAD") - new_data = new_data.replace("<NATIVE_LIBS_PRELOAD>", ld_preload) + new_data = new_data.replace("<NATIVE_LIBS_PRELOAD>", repr(ld_preload)) if args.runtime_env: runtime_env = dict(e.split("=", maxsplit=1) for e in args.runtime_env)
diff --git a/prelude/python/tools/make_source_db.py b/prelude/python/tools/make_source_db.py index ef7f638328a..a89d50efb56 100755 --- a/prelude/python/tools/make_source_db.py +++ b/prelude/python/tools/make_source_db.py @@ -6,6 +6,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + """ Creates a Python Source DB JSON file containing both a rule's immediate sources and the sources of all transitive dependencies (e.g. for use with Pyre).
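[Editor's note: a rough sketch of the source-DB shape `make_source_db.py` here produces, assumptions drawn from its docstring rather than the tool's exact code; the manifest entries are invented.]

import json

# Manifests are JSON lists of (destination, source, origin) tuples.
sources = [("mypkg/foo.py", "buck-out/gen/foo.py", "//mypkg:lib")]
deps = [("otherpkg/bar.py", "buck-out/gen/bar.py", "//otherpkg:lib")]

db = {
    "sources": {dst: src for dst, src, _ in sources},
    "dependencies": {dst: src for dst, src, _ in deps},
}
print(json.dumps(db, indent=2))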
@@ -34,19 +36,18 @@ } """ -# pyre-unsafe - import argparse import json import sys +from typing import List, Tuple -def _load(path): +def _load(path: str) -> List[Tuple[str, str, str]]: with open(path) as f: return json.load(f) -def main(argv): +def main(argv: List[str]) -> None: parser = argparse.ArgumentParser(fromfile_prefix_chars="@") parser.add_argument("--output", type=argparse.FileType("w"), default=sys.stdout) parser.add_argument("--sources")
diff --git a/prelude/python/tools/make_source_db_no_deps.py b/prelude/python/tools/make_source_db_no_deps.py index 547df485437..4493274eb1f 100644 --- a/prelude/python/tools/make_source_db_no_deps.py +++ b/prelude/python/tools/make_source_db_no_deps.py @@ -30,18 +30,15 @@ import argparse import json import sys +from typing import List, Tuple -# pyre-fixme[3]: Return type must be annotated. -# pyre-fixme[2]: Parameter must be annotated. -def _load(path): +def _load(path: str) -> List[Tuple[str, str, str]]: with open(path) as f: return json.load(f) -# pyre-fixme[3]: Return type must be annotated. -# pyre-fixme[2]: Parameter must be annotated. -def main(argv): +def main(argv: List[str]) -> None: parser = argparse.ArgumentParser(fromfile_prefix_chars="@") parser.add_argument("--output", type=argparse.FileType("w"), default=sys.stdout) parser.add_argument("sources")
diff --git a/prelude/python/tools/parse_imports.py b/prelude/python/tools/parse_imports.py index 6b97b5de67d..1e5e259af4b 100644 --- a/prelude/python/tools/parse_imports.py +++ b/prelude/python/tools/parse_imports.py @@ -116,9 +116,7 @@ def visit(self, node: ast.AST) -> None: try: return super().visit(node) except AttributeError as exc: - logger.error( - "Got %r when parsing %s from %s", exc, ast.dump(node), self.path - ) + logger.error(f"Got {exc} when parsing {ast.dump(node)} from {self.path}") def visit_Module(self, node: ast.Module) -> None: self._top_level = set(node.body)
diff --git a/prelude/python/tools/run_inplace.py.in b/prelude/python/tools/run_inplace.py.in index 1d8dadeae1a..8ea96bae17c 100644 --- a/prelude/python/tools/run_inplace.py.in +++ b/prelude/python/tools/run_inplace.py.in @@ -12,7 +12,7 @@ main_module = "" main_function = "" modules_dir = "" native_libs_env_var = "" -native_libs_dir = <NATIVE_LIBS_DIR> +native_libs_dirs = <NATIVE_LIBS_DIRS> native_libs_preload_env_var = "" native_libs_preload = <NATIVE_LIBS_PRELOAD> interpreter_flags = "" @@ -36,10 +36,19 @@ if platform.system() == "Windows" and not dirpath.startswith(unc_prefix): env_vals_to_restore = {} # Update the environment variable for the dynamic loader to the native # libraries location. -if native_libs_dir is not None: - old_native_libs_dir = os.environ.get(native_libs_env_var) - os.environ[native_libs_env_var] = os.path.join(dirpath, native_libs_dir) - env_vals_to_restore[native_libs_env_var] = old_native_libs_dir +if native_libs_dirs is not None: + old_native_libs_dirs = os.environ.get(native_libs_env_var) + os.environ[native_libs_env_var] = os.pathsep.join([ + os.path.join(dirpath, native_libs_dir) + for native_libs_dir in native_libs_dirs + ]) + env_vals_to_restore[native_libs_env_var] = old_native_libs_dirs +if os.environ.get("PAR_APPEND_LD_LIBRARY_PATH") is not None: + os.environ[native_libs_env_var] = ( + (os.environ[native_libs_env_var] + ":" + os.environ["PAR_APPEND_LD_LIBRARY_PATH"]) + if os.environ.get(native_libs_env_var) is not None + else os.environ["PAR_APPEND_LD_LIBRARY_PATH"] + ) # Update the environment variable for the dynamic loader to find libraries # to preload.
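[Editor's note: the PAR_APPEND_LD_LIBRARY_PATH handling above follows a common append-to-env-path pattern; a standalone sketch with hypothetical names. The patch uses a literal ":" since LD_LIBRARY_PATH is POSIX-specific, while os.pathsep generalizes it.]

import os

def append_env_path(var: str, extra: str) -> None:
    # Append `extra`, avoiding a leading separator when `var` is unset or empty.
    current = os.environ.get(var)
    os.environ[var] = current + os.pathsep + extra if current else extra

append_env_path("LD_LIBRARY_PATH", "/tmp/native_libs")  # hypothetical path
print(os.environ["LD_LIBRARY_PATH"])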
@@ -48,10 +57,24 @@ if native_libs_preload is not None: env_vals_to_restore[native_libs_preload_env_var] = old_native_libs_preload # On macos, preloaded libs are found via paths. - os.environ[native_libs_preload_env_var] = ":".join( - os.path.join(dirpath, native_libs_dir, l) - for l in native_libs_preload.split(":") - ) + if platform.system() == "Darwin": + full_path_preloads = [] + for lib in native_libs_preload: + for native_libs_dir in native_libs_dirs: + fpath = os.path.join(dirpath, native_libs_dir, lib) + if os.path.exists(fpath): + full_path_preloads.append(fpath) + break + else: + raise Exception( + "cannot find preload lib {!r} in paths {!r}".format( + lib, + native_libs_dirs, + ), + ) + native_libs_preload = full_path_preloads + + os.environ[native_libs_preload_env_var] = os.pathsep.join(native_libs_preload) @@ -101,12 +124,13 @@ def __run(): # On windows, adjust os.add_dll_directory and PATH (for `ctypes.util.find_library`) # so that native libraries can be found by the dynamic linker or ctypes if sys.platform.startswith("win"): - d = {os.path.join(dirpath, native_libs_dir)!r} - os.add_dll_directory(d) path = os.environ.get("PATH", "") - if path and not path.endswith(os.pathsep): - path += os.pathsep - path += d + for native_libs_dir in {native_libs_dirs!r}: + d = os.path.join({dirpath!r}, native_libs_dir) + os.add_dll_directory(d) + if path and not path.endswith(os.pathsep): + path += os.pathsep + path += d setenv("PATH", path) from <MAIN_RUNNER_MODULE> import <MAIN_RUNNER_FUNCTION> as run_as_main @@ -153,6 +177,8 @@ for env in ("PYTHONPATH", "LD_LIBRARY_PATH", "LD_PRELOAD", path = os.path.join(dirpath, modules_dir) os.environ["PYTHONPATH"] = path +if "PAR_APPEND_PYTHONPATH" in os.environ: + os.environ["PYTHONPATH"] += ":" + os.environ["PAR_APPEND_PYTHONPATH"] # This environment variable is immediately unset on startup but will also appear # in e.g. `multiprocessing` workers, and so serves as an audit trail back to
diff --git a/prelude/python/tools/sourcedb_merger/BUCK.v2 b/prelude/python/tools/sourcedb_merger/BUCK.v2 index 7f090f90ea9..2521ab5a741 100644 --- a/prelude/python/tools/sourcedb_merger/BUCK.v2 +++ b/prelude/python/tools/sourcedb_merger/BUCK.v2 @@ -1,3 +1,9 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + prelude = native prelude.python_bootstrap_library( @@ -13,19 +19,19 @@ prelude.python_bootstrap_library( prelude.python_bootstrap_binary( name = "merge", main = "merge.py", + visibility = ["PUBLIC"], deps = [ ":library", ], - visibility = ["PUBLIC"], ) prelude.python_bootstrap_binary( name = "legacy_merge", main = "legacy_merge.py", + visibility = ["PUBLIC"], deps = [ ":library", ], - visibility = ["PUBLIC"], ) # Run the test suite with this command:
diff --git a/prelude/python/tools/sourcedb_merger/inputs.py b/prelude/python/tools/sourcedb_merger/inputs.py index db2d4ed2411..2f298a25882 100644 --- a/prelude/python/tools/sourcedb_merger/inputs.py +++ b/prelude/python/tools/sourcedb_merger/inputs.py @@ -5,7 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree.
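[Editor's note: a self-contained sketch of the Darwin preload resolution in run_inplace.py.in above, with hypothetical names: each preload library resolves to the first native-libs dir that contains it, and a miss is a hard error.]

import os
from typing import List

def resolve_preloads(dirpath: str, native_libs_dirs: List[str], preloads: List[str]) -> List[str]:
    resolved = []
    for lib in preloads:
        for native_libs_dir in native_libs_dirs:
            fpath = os.path.join(dirpath, native_libs_dir, lib)
            if os.path.exists(fpath):
                resolved.append(fpath)
                break
        else:
            # No candidate dir contained the library; fail loudly like the template does.
            raise Exception(f"cannot find preload lib {lib!r} in paths {native_libs_dirs!r}")
    return resolved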
-# pyre-ignore-all-errors +# pyre-strict + import dataclasses import json diff --git a/prelude/python/tools/sourcedb_merger/legacy_merge.py b/prelude/python/tools/sourcedb_merger/legacy_merge.py index b6d977d11d5..0eb16221f9b 100644 --- a/prelude/python/tools/sourcedb_merger/legacy_merge.py +++ b/prelude/python/tools/sourcedb_merger/legacy_merge.py @@ -6,7 +6,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -# pyre-ignore-all-errors +# pyre-strict + import argparse import pathlib diff --git a/prelude/python/tools/sourcedb_merger/legacy_outputs.py b/prelude/python/tools/sourcedb_merger/legacy_outputs.py index 89ecd93fe66..ab225f9b05c 100644 --- a/prelude/python/tools/sourcedb_merger/legacy_outputs.py +++ b/prelude/python/tools/sourcedb_merger/legacy_outputs.py @@ -5,7 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -# pyre-ignore-all-errors +# pyre-strict + import dataclasses import json diff --git a/prelude/python/tools/sourcedb_merger/merge.py b/prelude/python/tools/sourcedb_merger/merge.py index b21a00a9e6b..f4dd4845b7f 100644 --- a/prelude/python/tools/sourcedb_merger/merge.py +++ b/prelude/python/tools/sourcedb_merger/merge.py @@ -6,7 +6,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -# pyre-ignore-all-errors +# pyre-strict + import argparse import pathlib diff --git a/prelude/python/tools/sourcedb_merger/outputs.py b/prelude/python/tools/sourcedb_merger/outputs.py index 40e6ef6885b..2565f75c5de 100644 --- a/prelude/python/tools/sourcedb_merger/outputs.py +++ b/prelude/python/tools/sourcedb_merger/outputs.py @@ -5,7 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -# pyre-ignore-all-errors +# pyre-strict + import dataclasses import json diff --git a/prelude/python/tools/sourcedb_merger/tests/__init__.py b/prelude/python/tools/sourcedb_merger/tests/__init__.py index 858c58d3c29..2444a8d0f43 100644 --- a/prelude/python/tools/sourcedb_merger/tests/__init__.py +++ b/prelude/python/tools/sourcedb_merger/tests/__init__.py @@ -5,8 +5,32 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -# pyre-ignore-all-errors +# pyre-strict + +# pyre-fixme[21]: Could not find name `BuildMapLoadError` in `tests.inputs_test`. +# pyre-fixme[21]: Could not find name `PartialBuildMap` in `tests.inputs_test`. +# pyre-fixme[21]: Could not find name `Target` in `tests.inputs_test`. +# pyre-fixme[21]: Could not find name `TargetEntry` in `tests.inputs_test`. +# pyre-fixme[21]: Could not find name `load_targets_and_build_maps_from_json` in +# `tests.inputs_test`. from .inputs_test import * # noqa + +# pyre-fixme[21]: Could not find name `ConflictInfo` in `tests.legacy_output_test`. +# pyre-fixme[21]: Could not find name `ConflictMap` in `tests.legacy_output_test`. +# pyre-fixme[21]: Could not find name `FullBuildMap` in `tests.legacy_output_test`. +# pyre-fixme[21]: Could not find name `MergeResult` in `tests.legacy_output_test`. +# pyre-fixme[21]: Could not find name `PartialBuildMap` in `tests.legacy_output_test`. +# pyre-fixme[21]: Could not find name `SourceInfo` in `tests.legacy_output_test`. +# pyre-fixme[21]: Could not find name `Target` in `tests.legacy_output_test`. +# pyre-fixme[21]: Could not find name `TargetEntry` in `tests.legacy_output_test`. 
+# pyre-fixme[21]: Could not find name `merge_partial_build_maps` in +# `tests.legacy_output_test`. from .legacy_output_test import * # noqa + +# pyre-fixme[21]: Could not find name `PartialBuildMap` in `tests.outputs_test`. +# pyre-fixme[21]: Could not find name `Target` in `tests.outputs_test`. +# pyre-fixme[21]: Could not find name `TargetEntry` in `tests.outputs_test`. +# pyre-fixme[21]: Could not find name `merge_partial_build_maps` in +# `tests.outputs_test`. from .outputs_test import * # noqa diff --git a/prelude/python/tools/sourcedb_merger/tests/inputs_test.py b/prelude/python/tools/sourcedb_merger/tests/inputs_test.py index da77a5463e9..75a8ce7e3ea 100644 --- a/prelude/python/tools/sourcedb_merger/tests/inputs_test.py +++ b/prelude/python/tools/sourcedb_merger/tests/inputs_test.py @@ -5,7 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -# pyre-ignore-all-errors +# pyre-strict + import contextlib import json diff --git a/prelude/python/tools/sourcedb_merger/tests/legacy_output_test.py b/prelude/python/tools/sourcedb_merger/tests/legacy_output_test.py index 4457ac4177b..3d587cbb87e 100644 --- a/prelude/python/tools/sourcedb_merger/tests/legacy_output_test.py +++ b/prelude/python/tools/sourcedb_merger/tests/legacy_output_test.py @@ -5,7 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -# pyre-ignore-all-errors +# pyre-strict + import unittest from typing import Mapping diff --git a/prelude/python/tools/sourcedb_merger/tests/outputs_test.py b/prelude/python/tools/sourcedb_merger/tests/outputs_test.py index 7b3db01c2c1..cb147a3dbaa 100644 --- a/prelude/python/tools/sourcedb_merger/tests/outputs_test.py +++ b/prelude/python/tools/sourcedb_merger/tests/outputs_test.py @@ -5,7 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -# pyre-ignore-all-errors +# pyre-strict + import unittest diff --git a/prelude/python/tools/static_extension_utils.cpp b/prelude/python/tools/static_extension_utils.cpp index 1470561cbd7..7e620935a59 100644 --- a/prelude/python/tools/static_extension_utils.cpp +++ b/prelude/python/tools/static_extension_utils.cpp @@ -114,7 +114,6 @@ static PyObject* _create_module(PyObject* self, PyObject* spec) { static PyObject* _exec_module(PyObject* self, PyObject* module) { PyModuleDef* def; - int res; // TODO errors if (!PyModule_Check(module)) { @@ -128,8 +127,9 @@ static PyObject* _exec_module(PyObject* self, PyObject* module) { Py_RETURN_NONE; } - res = PyModule_ExecDef(module, def); - // TODO check res + // TODO check this result + PyModule_ExecDef(module, def); + Py_RETURN_NONE; } diff --git a/prelude/python/tools/wheel.py b/prelude/python/tools/wheel.py new file mode 100644 index 00000000000..0dc378cdd5c --- /dev/null +++ b/prelude/python/tools/wheel.py @@ -0,0 +1,149 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +# pyre-strict + +import argparse +import configparser +import contextlib +import io +import json +import os +import sys +import zipfile +from types import TracebackType +from typing import cast, Dict, List, Optional, Set, Tuple, Type + + +# pyre-fixme[24]: Generic type `AbstractContextManager` expects 1 type parameter. 
+class WheelBuilder(contextlib.AbstractContextManager): + + def __init__( + self, + *, + name: str, + version: str, + output: str, + entry_points: Optional[Dict[str, str]] = None, + metadata: Optional[List[Tuple[str, str]]] = None, + ) -> None: + self._name = name + self._version = version + self._record: list[str] = [] + self._outf = zipfile.ZipFile(output, mode="w") + self._entry_points: Optional[Dict[str, str]] = entry_points + self._metadata: List[Tuple[str, str]] = [] + self._metadata.append(("Name", name)) + self._metadata.append(("Version", version)) + if metadata is not None: + self._metadata.extend(metadata) + + def _dist_info(self, *path: str) -> str: + return os.path.join(f"{self._name}-{self._version}.dist-info", *path) + + def _data(self, *path: str) -> str: + return os.path.join(f"{self._name}-{self._version}.data", *path) + + def write(self, dst: str, src: str) -> None: + self._record.append(dst) + self._outf.write(filename=src, arcname=dst) + + def write_data(self, dst: str, src: str) -> None: + self.write(self._data(dst), src) + + def writestr(self, dst: str, contents: str) -> None: + self._record.append(dst) + self._outf.writestr(zinfo_or_arcname=dst, data=contents) + + def _write_record(self) -> None: + record = self._dist_info("RECORD") + self._outf.writestr( + record, "".join(["{},,\n".format(f) for f in (self._record + [record])]) + ) + + def close(self) -> None: + self.writestr( + self._dist_info("METADATA"), + "".join(["{}: {}\n".format(key, val) for key, val in self._metadata]), + ) + self.writestr( + self._dist_info("WHEEL"), + """\ +Wheel-Version: 1.0 +""", + ) + + # Write entry points. + if self._entry_points is not None: + config = configparser.ConfigParser() + config.read_dict(cast(Dict[str, Dict[str, str]], self._entry_points)) + with io.TextIOWrapper( + self._outf.open(self._dist_info("entry_points.txt"), mode="w"), + encoding="utf-8", + ) as f: + config.write(f) + + self._write_record() + self._outf.close() + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_value: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + self.close() + + +def main(argv: List[str]) -> None: + parser = argparse.ArgumentParser() + parser.add_argument("--output", required=True) + parser.add_argument("--name", required=True) + parser.add_argument("--version", required=True) + parser.add_argument("--entry-points", default=None) + parser.add_argument("--srcs", action="append", default=[]) + parser.add_argument("--metadata", nargs=2, action="append", default=[]) + parser.add_argument("--data", nargs=2, action="append", default=[]) + args = parser.parse_args(argv[1:]) + + pkgs: Set[str] = set() + pkgs_with_init = set() + + def _add_pkg(pkg: str) -> None: + pkgs.add(pkg) + parent = os.path.dirname(pkg) + if parent: + _add_pkg(parent) + + with WheelBuilder( + name=args.name, + version=args.version, + output=args.output, + entry_points=( + json.loads(args.entry_points) if args.entry_points is not None else None + ), + metadata=args.metadata, + ) as whl: + for src in args.srcs: + with open(src) as f: + manifest = json.load(f) + for dst, src, *_ in manifest: + if dst.endswith((".py", ".so")): + pkg = os.path.dirname(dst) + _add_pkg(pkg) + if os.path.basename(dst) == "__init__.py": + pkgs_with_init.add(pkg) + whl.write(dst, src) + + for dst, src in args.data: + whl.write_data(dst, src) + + for pkg in pkgs - pkgs_with_init: + whl.writestr(os.path.join(pkg, "__init__.py"), "") + + +sys.exit(main(sys.argv)) diff --git 
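[Editor's note: a usage sketch for the new `wheel.py` tool above; the invocation and archive layout are illustrative assumptions, not documented behavior.]

# Hypothetical invocation (srcs.json is a manifest of (dst, src, ...) tuples):
#   python3 wheel.py --output mypkg-1.0-py3-none-any.whl \
#       --name mypkg --version 1.0 --srcs srcs.json
#
# WheelBuilder would then emit roughly this archive layout:
#   mypkg/__init__.py
#   mypkg-1.0.dist-info/METADATA
#   mypkg-1.0.dist-info/WHEEL
#   mypkg-1.0.dist-info/RECORD
import zipfile

# Inspect the result (assumes the wheel built above exists on disk):
with zipfile.ZipFile("mypkg-1.0-py3-none-any.whl") as whl:
    print("\n".join(whl.namelist()))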
a/prelude/python/typecheck/batch.bxl b/prelude/python/typecheck/batch.bxl index 2521ddf1fbf..9d3e8aed84d 100644 --- a/prelude/python/typecheck/batch.bxl +++ b/prelude/python/typecheck/batch.bxl @@ -5,13 +5,11 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +load("@prelude//utils:utils.bzl", "flatten") load("@prelude//python/sourcedb/filter.bxl", "do_filter") -def _run_entry_point(ctx: bxl.Context) -> None: - uquery = ctx.uquery() - checked_targets = ctx.configured_targets( - do_filter(uquery, [uquery.eval(target) for target in ctx.cli_args.target]), - ) +def check_targets(ctx: bxl.Context, targets: typing.Any) -> None: + checked_targets = ctx.configured_targets(do_filter(ctx.uquery(), targets)) build_result = ctx.build([ target.label.with_sub_target("typecheck") @@ -27,12 +25,16 @@ def _run_entry_point(ctx: bxl.Context) -> None: "root": ctx.root(), }) +def _run_entry_point(ctx: bxl.Context) -> None: + targets = flatten(ctx.cli_args.target) + check_targets(ctx, targets) + run = bxl_main( doc = "Run [typecheck] on a set of targets or target patterns.", impl = _run_entry_point, cli_args = { "target": cli_args.list( - cli_args.string( + cli_args.target_expr( doc = "Target pattern to run type checking on", ), ), diff --git a/prelude/python/typecheck/batch_files.bxl b/prelude/python/typecheck/batch_files.bxl new file mode 100644 index 00000000000..c18b556f19c --- /dev/null +++ b/prelude/python/typecheck/batch_files.bxl @@ -0,0 +1,24 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
+ +load(":batch.bxl", "check_targets") + +def _run_entry_point(ctx: bxl.Context) -> None: + targets = ctx.uquery().owner(ctx.cli_args.source) + check_targets(ctx, targets) + +run = bxl_main( + doc = "Run [typecheck] on the owning targets of given files.", + impl = _run_entry_point, + cli_args = { + "source": cli_args.list( + cli_args.string( + doc = "Files whose owning targets need to be checked", + ), + ), + }, +) diff --git a/prelude/python/typing.bzl b/prelude/python/typing.bzl index e66e083b483..f2a7cef561b 100644 --- a/prelude/python/typing.bzl +++ b/prelude/python/typing.bzl @@ -14,6 +14,37 @@ load(":python.bzl", "PythonLibraryManifestsTSet") DEFAULT_PY_VERSION = "3.10" +def _create_batched_type_check( + ctx: AnalysisContext, + executable: RunInfo, + typeshed_manifest: Artifact, + py_version: str | None, + source_manifests: list[Artifact], + dep_manifests: typing.Any, + hidden: typing.Any) -> Artifact: + cmd = [executable] + + # Create input configs + input_config = { + "dependencies": dep_manifests, + "py_version": py_version or DEFAULT_PY_VERSION, + "sources": source_manifests, + "typeshed": typeshed_manifest, + } + + input_file = ctx.actions.write_json( + "type_check_config.json", + input_config, + with_inputs = True, + ) + output_file = ctx.actions.declare_output("type_check_result.json") + cmd.append(cmd_args(input_file)) + cmd.append(cmd_args(output_file.as_output(), format = "--output={}")) + + ctx.actions.run(cmd_args(cmd, hidden = hidden), category = "type_check") + + return output_file + def create_per_target_type_check( ctx: AnalysisContext, executable: RunInfo, @@ -22,45 +53,42 @@ def create_per_target_type_check( typeshed: ManifestInfo | None, py_version: str | None, typing_enabled: bool) -> DefaultInfo: - output_file_name = "type_check_result.json" if not typing_enabled: # Use empty dict to signal that no type checking was performed. 
- output_file = ctx.actions.write_json(output_file_name, {}) - else: - cmd = cmd_args(executable) - cmd.add(cmd_args("check")) + output_file = ctx.actions.write_json("type_check_result.json", {}) + return DefaultInfo(default_output = output_file) - # Source artifacts - source_manifests = [] - if srcs != None: - source_manifests = [srcs.manifest] - cmd.hidden([a for a, _ in srcs.artifacts]) + hidden = [] - # Dep artifacts - dep_manifest_tset = ctx.actions.tset(PythonLibraryManifestsTSet, children = [d.manifests for d in deps]) - dep_manifests = dep_manifest_tset.project_as_args("source_type_manifests") - cmd.hidden(dep_manifest_tset.project_as_args("source_type_artifacts")) + # Dep artifacts + dep_manifest_tset = ctx.actions.tset( + PythonLibraryManifestsTSet, + children = [d.manifests for d in deps], + ) + dep_manifests = dep_manifest_tset.project_as_args("source_type_manifests") + hidden.append(dep_manifest_tset.project_as_args("source_type_artifacts")) - # Typeshed artifacts - if typeshed != None: - cmd.hidden([a for a, _ in typeshed.artifacts]) - typeshed_manifest = typeshed.manifest - else: - typeshed_manifest = None - - # Create input configs - input_config = { - "dependencies": dep_manifests, - "py_version": py_version or DEFAULT_PY_VERSION, - "sources": source_manifests, - "typeshed": typeshed_manifest, - } - - input_file = ctx.actions.write_json("type_check_config.json", input_config, with_inputs = True) - output_file = ctx.actions.declare_output(output_file_name) - cmd.add(cmd_args(input_file)) - cmd.add(cmd_args(output_file.as_output(), format = "--output={}")) + # Typeshed artifacts + if typeshed != None: + hidden.extend([a for a, _ in typeshed.artifacts]) + typeshed_manifest = typeshed.manifest + else: + typeshed_manifest = None - ctx.actions.run(cmd, category = "type_check") + # Source artifacts + source_manifests = [] + if srcs != None: + source_manifests.append(srcs.manifest) + hidden.extend([a for a, _ in srcs.artifacts]) - return DefaultInfo(default_output = output_file) + return DefaultInfo( + default_output = _create_batched_type_check( + ctx, + executable, + typeshed_manifest, + py_version, + source_manifests, + dep_manifests, + hidden, + ), + ) diff --git a/prelude/python_bootstrap/python_bootstrap.bzl b/prelude/python_bootstrap/python_bootstrap.bzl index b4c0d021bd1..ef628945dc3 100644 --- a/prelude/python_bootstrap/python_bootstrap.bzl +++ b/prelude/python_bootstrap/python_bootstrap.bzl @@ -43,15 +43,18 @@ def python_bootstrap_binary_impl(ctx: AnalysisContext) -> list[Provider]: interpreter = ctx.attrs._python_bootstrap_toolchain[PythonBootstrapToolchainInfo].interpreter - run_args = cmd_args() if ctx.attrs._win_python_wrapper != None: - run_args.add(ctx.attrs._win_python_wrapper[RunInfo]) - run_args.add(run_tree) - run_args.add(interpreter) - run_args.add(output) + run_args = cmd_args( + ctx.attrs._win_python_wrapper[RunInfo], + run_tree, + interpreter, + output, + ) else: - run_args.add("/usr/bin/env") - run_args.add(cmd_args(run_tree, format = "PYTHONPATH={}")) - run_args.add(interpreter) - run_args.add(output) + run_args = cmd_args( + "/usr/bin/env", + cmd_args(run_tree, format = "PYTHONPATH={}"), + interpreter, + output, + ) return [DefaultInfo(default_output = output), RunInfo(args = run_args)] diff --git a/prelude/python_bootstrap/tools/BUCK.v2 b/prelude/python_bootstrap/tools/BUCK.v2 index b895ef1a52b..e3fb697482f 100644 --- a/prelude/python_bootstrap/tools/BUCK.v2 +++ b/prelude/python_bootstrap/tools/BUCK.v2 @@ -1,8 +1,14 @@ 
+load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + prelude = native prelude.sh_binary( name = "win_python_wrapper", main = "win_python_wrapper.bat", - visibility = ["PUBLIC"], target_compatible_with = ["config//os:windows"], + visibility = ["PUBLIC"], ) diff --git a/prelude/python_bootstrap/tools/win_python_wrapper.bat b/prelude/python_bootstrap/tools/win_python_wrapper.bat index 8aa8c4a25a4..f2c8b0cfb79 100644 --- a/prelude/python_bootstrap/tools/win_python_wrapper.bat +++ b/prelude/python_bootstrap/tools/win_python_wrapper.bat @@ -17,5 +17,5 @@ setlocal enabledelayedexpansion set args=;;;;;;%* set args=!args:;;;;;;%1 =! -set PYTHONPATH=%1 +set PYTHONPATH=%~1 %args% diff --git a/prelude/resources.bzl b/prelude/resources.bzl index dd90cf734ba..89126a65fb4 100644 --- a/prelude/resources.bzl +++ b/prelude/resources.bzl @@ -47,7 +47,7 @@ def create_resource_db( """ db = { - name: cmd_args(resource.default_output, delimiter = "").relative_to(binary, parent = 1) + name: cmd_args(resource.default_output, delimiter = "", relative_to = (binary, 1)) for (name, resource) in resources.items() } return ctx.actions.write_json(name, db) diff --git a/prelude/rules.bzl b/prelude/rules.bzl index 64ab5e5f3f3..d132966efa5 100644 --- a/prelude/rules.bzl +++ b/prelude/rules.bzl @@ -6,6 +6,7 @@ # of this source tree. load("@prelude//:buck2_compatibility.bzl", "BUCK2_COMPATIBILITY_ATTRIB_NAME", "BUCK2_COMPATIBILITY_ATTRIB_TYPE", "check_buck2_compatibility") +load("@prelude//apple:apple_platforms.bzl", "APPLE_PLATFORMS_KEY") load("@prelude//configurations:rules.bzl", _config_implemented_rules = "implemented_rules") load("@prelude//decls/common.bzl", "prelude_rule") load("@prelude//is_full_meta_repo.bzl", "is_full_meta_repo") @@ -22,7 +23,7 @@ def _unimplemented_impl(name): # some features disabled. return partial(_unimplemented, name) -def _mk_rule(rule_spec: typing.Any, extra_attrs: dict[str, typing.Any] = dict(), **kwargs): +def _mk_rule(rule_spec: typing.Any, extra_attrs: dict[str, typing.Any] = dict(), impl_override: [typing.Callable, None] = None, **kwargs): name = rule_spec.name attributes = rule_spec.attrs @@ -53,8 +54,8 @@ def _mk_rule(rule_spec: typing.Any, extra_attrs: dict[str, typing.Any] = dict(), # Add _apple_platforms to all rules so that we may query the target platform to use until we support configuration # modifiers and can use them to set the configuration to use for operations. - # Map of string identifer to platform. - attributes["_apple_platforms"] = attrs.dict(key = attrs.string(), value = attrs.dep(), sorted = False, default = {}) + # Map of string identifier to platform. + attributes[APPLE_PLATFORMS_KEY] = attrs.dict(key = attrs.string(), value = attrs.dep(), sorted = False, default = {}) extra_args = dict(kwargs) cfg = transitions.get(name) @@ -84,6 +85,8 @@ def _mk_rule(rule_spec: typing.Any, extra_attrs: dict[str, typing.Any] = dict(), impl = extra_impl if not impl: impl = _unimplemented_impl(name) + if impl_override != None: + impl = impl_override if rule_spec.uses_plugins != None: extra_args["uses_plugins"] = rule_spec.uses_plugins @@ -144,7 +147,7 @@ rules = {rule.name: _mk_rule(rule) for rule in _declared_rules.values()} load_symbols(rules) # TODO(akrieger): Remove this and instead refactor to allow impl bzl files to export attrs. 
-def clone_rule(rule: str, extra_attrs: dict[str, typing.Any] = dict(), **kwargs): +def clone_rule(rule: str, extra_attrs: dict[str, typing.Any] = dict(), impl_override = None, **kwargs): if not rule in _declared_rules: fail("Tried clone rule {} which does not exist".format(rule)) - return _mk_rule(_declared_rules[rule], extra_attrs, **kwargs) + return _mk_rule(_declared_rules[rule], extra_attrs, impl_override, **kwargs) diff --git a/prelude/rules_impl.bzl b/prelude/rules_impl.bzl index c174fcb6224..1221b61aecf 100644 --- a/prelude/rules_impl.bzl +++ b/prelude/rules_impl.bzl @@ -15,19 +15,19 @@ load("@prelude//cxx:cxx.bzl", "cxx_binary_impl", "cxx_library_impl", "cxx_precom load("@prelude//cxx:cxx_toolchain.bzl", "cxx_toolchain_extra_attributes", "cxx_toolchain_impl") load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxPlatformInfo", "CxxToolchainInfo") load("@prelude//cxx:headers.bzl", "CPrecompiledHeaderInfo", "HeaderMode") +load("@prelude//cxx:link_groups_types.bzl", "LINK_GROUP_MAP_ATTR") load("@prelude//cxx:prebuilt_cxx_library_group.bzl", "prebuilt_cxx_library_group_impl") load("@prelude//cxx:windows_resource.bzl", "windows_resource_impl") -load("@prelude//cxx/user:link_group_map.bzl", "link_group_map_attr") load("@prelude//erlang:erlang.bzl", _erlang_implemented_rules = "implemented_rules") load("@prelude//git:git_fetch.bzl", "git_fetch_impl") -load("@prelude//go:cgo_library.bzl", "cgo_library_impl") load("@prelude//go:coverage.bzl", "GoCoverageMode") load("@prelude//go:go_binary.bzl", "go_binary_impl") load("@prelude//go:go_exported_library.bzl", "go_exported_library_impl") load("@prelude//go:go_library.bzl", "go_library_impl") load("@prelude//go:go_stdlib.bzl", "go_stdlib_impl") load("@prelude//go:go_test.bzl", "go_test_impl") -load("@prelude//go/transitions:defs.bzl", "cgo_enabled_attr", "compile_shared_attr", "coverage_mode_attr", "go_binary_transition", "go_exported_library_transition", "go_test_transition", "race_attr", "tags_attr") +load("@prelude//go/transitions:defs.bzl", "asan_attr", "cgo_enabled_attr", "coverage_mode_attr", "go_binary_transition", "go_exported_library_transition", "go_library_transition", "go_stdlib_transition", "go_test_transition", "race_attr", "tags_attr") +load("@prelude//go_bootstrap:go_bootstrap.bzl", "go_bootstrap_binary_impl") load("@prelude//haskell:haskell.bzl", "haskell_binary_impl", "haskell_library_impl", "haskell_prebuilt_library_impl") load("@prelude//haskell:haskell_ghci.bzl", "haskell_ghci_impl") load("@prelude//haskell:haskell_haddock.bzl", "haskell_haddock_impl") @@ -40,6 +40,7 @@ load("@prelude//julia:julia.bzl", _julia_extra_attributes = "extra_attributes", load("@prelude//kotlin:kotlin.bzl", _kotlin_extra_attributes = "extra_attributes", _kotlin_implemented_rules = "implemented_rules") load("@prelude//linking:execution_preference.bzl", "link_execution_preference_attr") load("@prelude//linking:link_info.bzl", "LinkOrdering") +load("@prelude//linking:types.bzl", "Linkage") load("@prelude//lua:cxx_lua_extension.bzl", "cxx_lua_extension_impl") load("@prelude//lua:lua_binary.bzl", "lua_binary_impl") load("@prelude//lua:lua_library.bzl", "lua_library_impl") @@ -48,6 +49,7 @@ load("@prelude//ocaml:attrs.bzl", _ocaml_extra_attributes = "ocaml_extra_attribu load("@prelude//ocaml:ocaml.bzl", "ocaml_binary_impl", "ocaml_library_impl", "ocaml_object_impl", "ocaml_shared_impl", "prebuilt_ocaml_library_impl") load("@prelude//python:cxx_python_extension.bzl", "cxx_python_extension_impl") load("@prelude//python:prebuilt_python_library.bzl", 
"prebuilt_python_library_impl") +load("@prelude//python:python.bzl", "PythonLibraryInfo") load("@prelude//python:python_binary.bzl", "python_binary_impl") load("@prelude//python:python_library.bzl", "python_library_impl") load("@prelude//python:python_needed_coverage_test.bzl", "python_needed_coverage_test_impl") @@ -55,8 +57,9 @@ load("@prelude//python:python_test.bzl", "python_test_impl") load("@prelude//python_bootstrap:python_bootstrap.bzl", "PythonBootstrapSources", "python_bootstrap_binary_impl", "python_bootstrap_library_impl") load("@prelude//zip_file:zip_file.bzl", _zip_file_extra_attributes = "extra_attributes", _zip_file_implemented_rules = "implemented_rules") load("@prelude//apple/user/apple_resource_transition.bzl", "apple_resource_transition") +load("@prelude//apple/user/target_sdk_version_transition.bzl", "target_sdk_version_transition") load("@prelude//decls/android_rules.bzl", "android_rules") -load("@prelude//decls/common.bzl", "IncludeType", "LinkableDepType", "Linkage", "buck") +load("@prelude//decls/common.bzl", "IncludeType", "LinkableDepType", "buck") load("@prelude//decls/core_rules.bzl", "core_rules") load("@prelude//decls/cxx_rules.bzl", "cxx_rules") load("@prelude//decls/d_rules.bzl", "d_rules") @@ -93,6 +96,14 @@ load(":sh_test.bzl", "sh_test_impl") load(":test_suite.bzl", "test_suite_impl") load(":worker_tool.bzl", "worker_tool") +BUILD_INFO_ATTR = attrs.dict( + key = attrs.string(), + value = attrs.option(attrs.any()), + sorted = False, + default = {}, + doc = "Build info that is passed along here will be late-stamped into a fb_build_info section on the output binary", +) + rule_decl_records = [ android_rules, core_rules, @@ -169,8 +180,8 @@ extra_implemented_rules = struct( git_fetch = git_fetch_impl, #go - cgo_library = cgo_library_impl, go_binary = go_binary_impl, + go_bootstrap_binary = go_bootstrap_binary_impl, go_exported_library = go_exported_library_impl, go_library = go_library_impl, go_test = go_test_impl, @@ -235,6 +246,12 @@ def _cxx_python_extension_attrs(): "allow_suffixing": attrs.bool(default = True), # Copied from cxx_library. "auto_link_groups": attrs.bool(default = False), + + # These flags will only be used to instrument a target + # when coverage for that target is enabled by `exported_needs_coverage_instrumentation` + # or by any of the target's dependencies. 
+ "coverage_instrumentation_compiler_flags": attrs.list(attrs.string(), default = []), + "exported_needs_coverage_instrumentation": attrs.bool(default = False), "link_ordering": attrs.option(attrs.enum(LinkOrdering.values()), default = None), "link_whole": attrs.default_only(attrs.bool(default = True)), "precompiled_header": attrs.option(attrs.dep(providers = [CPrecompiledHeaderInfo]), default = None), @@ -249,6 +266,11 @@ def _cxx_python_extension_attrs(): }) return res +def _constraint_overrides_attr(): + return { + "constraint_overrides": attrs.list(attrs.string(), default = []), + } + # Attrs common between python binary/test def _python_executable_attrs(): cxx_binary_attrs = {k: v for k, v in cxx_rules.cxx_binary.attrs.items()} @@ -262,6 +284,8 @@ def _python_executable_attrs(): if key not in python_executable_attrs } + updated_attrs.update(_constraint_overrides_attr()) + # allow non-default value for the args below updated_attrs.update({ "anonymous_link_groups": attrs.bool(default = False), @@ -269,7 +293,6 @@ def _python_executable_attrs(): "bolt_flags": attrs.list(attrs.arg(), default = []), "bolt_profile": attrs.option(attrs.source(), default = None), "compiler_flags": attrs.list(attrs.arg(), default = []), - "constraint_overrides": attrs.list(attrs.string(), default = []), "cxx_main": attrs.source(default = "prelude//python/tools:embedded_main.cpp"), "distributed_thinlto_partial_split_dwarf": attrs.bool(default = False), "enable_distributed_thinlto": attrs.bool(default = False), @@ -277,7 +300,7 @@ def _python_executable_attrs(): "executable_name": attrs.option(attrs.string(), default = None), "inplace_build_args": attrs.list(attrs.arg(), default = []), "link_group": attrs.option(attrs.string(), default = None), - "link_group_map": link_group_map_attr(), + "link_group_map": LINK_GROUP_MAP_ATTR, "link_group_min_binary_node_count": attrs.option(attrs.int(), default = None), "link_style": attrs.enum(LinkableDepType, default = "static"), "main_function": attrs.option( @@ -313,11 +336,14 @@ def _python_executable_attrs(): "package_split_dwarf_dwp": attrs.bool(default = False), "par_style": attrs.option(attrs.string(), default = None), "resources": attrs.named_set(attrs.one_of(attrs.dep(), attrs.source(allow_directory = True)), sorted = True, default = []), + "run_with_inplace": attrs.bool(default = False), "runtime_env": attrs.option(attrs.dict(key = attrs.string(), value = attrs.string()), default = None), "standalone_build_args": attrs.list(attrs.arg(), default = []), "static_extension_finder": attrs.source(default = "prelude//python/tools:static_extension_finder.py"), "static_extension_utils": attrs.source(default = "prelude//python/tools:static_extension_utils.cpp"), "strip_libpar": attrs.enum(StripLibparStrategy, default = "none"), + "strip_stapsdt": attrs.bool(default = False), + "_build_info": BUILD_INFO_ATTR, "_create_manifest_for_source_dir": _create_manifest_for_source_dir(), "_cxx_hacks": attrs.default_only(attrs.dep(default = "prelude//cxx/tools:cxx_hacks")), "_cxx_toolchain": toolchains_common.cxx(), @@ -331,11 +357,12 @@ def _python_executable_attrs(): def _python_test_attrs(): test_attrs = _python_executable_attrs() test_attrs["_test_main"] = attrs.source(default = "prelude//python/tools:__test_main__.py") + test_attrs["implicit_test_library"] = attrs.option(attrs.dep(providers = [PythonLibraryInfo]), default = None) test_attrs.update(re_test_common.test_args()) return test_attrs def _cxx_binary_and_test_attrs(): - return { + ret = { "anonymous_link_groups": 
attrs.bool(default = False), "auto_link_groups": attrs.bool(default = False), # Linker flags that only apply to the executable link, used for link @@ -344,19 +371,27 @@ def _cxx_binary_and_test_attrs(): "binary_linker_flags": attrs.list(attrs.arg(anon_target_compatible = True), default = []), "bolt_flags": attrs.list(attrs.arg(), default = []), "bolt_profile": attrs.option(attrs.source(), default = None), - "constraint_overrides": attrs.list(attrs.string(), default = []), + # These flags will only be used to instrument a target + # when coverage for that target is enabled by a header + # selected for coverage either in the target or in one + # of the target's dependencies. + "coverage_instrumentation_compiler_flags": attrs.list(attrs.string(), default = []), "distributed_thinlto_partial_split_dwarf": attrs.bool(default = False), "enable_distributed_thinlto": attrs.bool(default = False), + "exported_needs_coverage_instrumentation": attrs.bool(default = False), "link_execution_preference": link_execution_preference_attr(), - "link_group_map": link_group_map_attr(), + "link_group_map": LINK_GROUP_MAP_ATTR, "link_group_min_binary_node_count": attrs.option(attrs.int(), default = None), "link_ordering": attrs.option(attrs.enum(LinkOrdering.values()), default = None), "link_whole": attrs.default_only(attrs.bool(default = False)), "precompiled_header": attrs.option(attrs.dep(providers = [CPrecompiledHeaderInfo]), default = None), "resources": attrs.named_set(attrs.one_of(attrs.dep(), attrs.source(allow_directory = True)), sorted = True, default = []), + "_build_info": BUILD_INFO_ATTR, "_cxx_hacks": attrs.dep(default = "prelude//cxx/tools:cxx_hacks"), "_cxx_toolchain": toolchains_common.cxx(), } + ret.update(_constraint_overrides_attr()) + return ret NativeLinkStrategy = ["separate", "native", "merged"] StripLibparStrategy = ["full", "extract", "none"] @@ -381,18 +416,6 @@ def _create_manifest_for_source_dir(): inlined_extra_attributes = { - # go - "cgo_library": { - "embedcfg": attrs.option(attrs.source(allow_directory = False), default = None), - "_compile_shared": compile_shared_attr, - "_coverage_mode": coverage_mode_attr, - "_cxx_toolchain": toolchains_common.cxx(), - "_exec_os_type": buck.exec_os_type_arg(), - "_go_stdlib": attrs.default_only(attrs.dep(default = "prelude//go/tools:stdlib")), - "_go_toolchain": toolchains_common.go(), - "_race": race_attr, - "_tags": tags_attr, - }, # csharp "csharp_library": { "_csharp_toolchain": toolchains_common.csharp(), @@ -406,20 +429,33 @@ inlined_extra_attributes = { }, "cxx_library": { "auto_link_groups": attrs.bool(default = False), + # These flags will only be used to instrument a target + # when coverage for that target is enabled by `exported_needs_coverage_instrumentation` + # or by any of the target's dependencies. 
+ "coverage_instrumentation_compiler_flags": attrs.list(attrs.string(), default = []), "deps_query": attrs.option(attrs.query(), default = None), + "exported_needs_coverage_instrumentation": attrs.bool(default = False), "extra_xcode_sources": attrs.list(attrs.source(allow_directory = True), default = []), "header_mode": attrs.option(attrs.enum(HeaderMode.values()), default = None), "link_deps_query_whole": attrs.bool(default = False), "link_execution_preference": link_execution_preference_attr(), - "link_group_map": link_group_map_attr(), + "link_group_map": LINK_GROUP_MAP_ATTR, "link_ordering": attrs.option(attrs.enum(LinkOrdering.values()), default = None), "precompiled_header": attrs.option(attrs.dep(providers = [CPrecompiledHeaderInfo]), default = None), "prefer_stripped_objects": attrs.bool(default = False), - "preferred_linkage": attrs.enum(Linkage, default = "any"), + "preferred_linkage": attrs.enum( + Linkage.values(), + default = "any", + doc = """ + Determines what linkage is used when the library is depended on by another target. To + control how the dependencies of this library are linked, use `link_style` instead. + """, + ), "resources": attrs.named_set(attrs.one_of(attrs.dep(), attrs.source(allow_directory = True)), sorted = True, default = []), "supports_header_symlink_subtarget": attrs.bool(default = False), "supports_python_dlopen": attrs.option(attrs.bool(), default = None), "supports_shlib_interfaces": attrs.bool(default = True), + "_create_third_party_build_root": attrs.default_only(attrs.exec_dep(default = "prelude//third-party/tools:create_build")), "_cxx_hacks": attrs.default_only(attrs.dep(default = "prelude//cxx/tools:cxx_hacks")), "_cxx_toolchain": toolchains_common.cxx(), "_is_building_android_binary": is_building_android_binary_attr(), @@ -435,14 +471,24 @@ inlined_extra_attributes = { "go_binary": { "embedcfg": attrs.option(attrs.source(allow_directory = False), default = None), "resources": attrs.list(attrs.one_of(attrs.dep(), attrs.source(allow_directory = True)), default = []), + "_asan": asan_attr, + "_build_info": BUILD_INFO_ATTR, + "_cxx_toolchain": toolchains_common.cxx(), "_exec_os_type": buck.exec_os_type_arg(), "_go_stdlib": attrs.default_only(attrs.dep(default = "prelude//go/tools:stdlib")), "_go_toolchain": toolchains_common.go(), "_race": race_attr, "_tags": tags_attr, }, + "go_bootstrap_binary": { + "_exec_os_type": buck.exec_os_type_arg(), + "_go_bootstrap_toolchain": toolchains_common.go_bootstrap(), + }, "go_exported_library": { "embedcfg": attrs.option(attrs.source(allow_directory = False), default = None), + "_asan": asan_attr, + "_build_info": BUILD_INFO_ATTR, + "_cxx_toolchain": toolchains_common.cxx(), "_exec_os_type": buck.exec_os_type_arg(), "_go_stdlib": attrs.default_only(attrs.dep(default = "prelude//go/tools:stdlib")), "_go_toolchain": toolchains_common.go(), @@ -451,17 +497,20 @@ inlined_extra_attributes = { }, "go_library": { "embedcfg": attrs.option(attrs.source(allow_directory = False), default = None), + "_asan": asan_attr, "_cgo_enabled": cgo_enabled_attr, - "_compile_shared": compile_shared_attr, "_coverage_mode": coverage_mode_attr, + "_cxx_toolchain": toolchains_common.cxx(), + "_exec_os_type": buck.exec_os_type_arg(), "_go_stdlib": attrs.default_only(attrs.dep(default = "prelude//go/tools:stdlib")), "_go_toolchain": toolchains_common.go(), "_race": race_attr, "_tags": tags_attr, }, "go_stdlib": { + "_asan": asan_attr, "_cgo_enabled": cgo_enabled_attr, - "_compile_shared": compile_shared_attr, + "_cxx_toolchain": 
toolchains_common.cxx(), "_exec_os_type": buck.exec_os_type_arg(), "_go_toolchain": toolchains_common.go(), "_race": race_attr, @@ -471,13 +520,16 @@ inlined_extra_attributes = { "coverage_mode": attrs.option(attrs.enum(GoCoverageMode.values()), default = None), "embedcfg": attrs.option(attrs.source(allow_directory = False), default = None), "resources": attrs.list(attrs.source(allow_directory = True), default = []), + "_asan": asan_attr, + "_build_info": BUILD_INFO_ATTR, "_coverage_mode": coverage_mode_attr, + "_cxx_toolchain": toolchains_common.cxx(), "_exec_os_type": buck.exec_os_type_arg(), "_go_stdlib": attrs.default_only(attrs.dep(default = "prelude//go/tools:stdlib")), "_go_toolchain": toolchains_common.go(), "_race": race_attr, "_tags": tags_attr, - "_testmaingen": attrs.default_only(attrs.exec_dep(default = "prelude//go/tools:testmaingen")), + "_testmaingen": attrs.default_only(attrs.exec_dep(providers = [RunInfo], default = "prelude//go_bootstrap/tools:go_testmaingen")), }, # groovy @@ -489,7 +541,7 @@ inlined_extra_attributes = { }, "haskell_binary": { "auto_link_groups": attrs.bool(default = False), - "link_group_map": link_group_map_attr(), + "link_group_map": LINK_GROUP_MAP_ATTR, "template_deps": attrs.list(attrs.exec_dep(providers = [HaskellLibraryProvider]), default = []), "_cxx_toolchain": toolchains_common.cxx(), "_haskell_toolchain": toolchains_common.haskell(), @@ -508,7 +560,7 @@ inlined_extra_attributes = { "_haskell_toolchain": toolchains_common.haskell(), }, "haskell_library": { - "preferred_linkage": attrs.enum(Linkage, default = "any"), + "preferred_linkage": attrs.enum(Linkage.values(), default = "any"), "template_deps": attrs.list(attrs.exec_dep(providers = [HaskellLibraryProvider]), default = []), "_cxx_toolchain": toolchains_common.cxx(), "_haskell_toolchain": toolchains_common.haskell(), @@ -525,12 +577,20 @@ inlined_extra_attributes = { "linker_flags": attrs.list(attrs.arg(anon_target_compatible = True), default = []), "platform_header_dirs": attrs.option(attrs.list(attrs.tuple(attrs.regex(), attrs.list(attrs.source(allow_directory = True)))), default = None), "post_linker_flags": attrs.list(attrs.arg(anon_target_compatible = True), default = []), - "preferred_linkage": attrs.enum(Linkage, default = "any"), + "preferred_linkage": attrs.enum( + Linkage.values(), + default = "any", + doc = """ + Determines what linkage is used when the library is depended on by another target. To + control how the dependencies of this library are linked, use `link_style` instead. 
+ """, + ), "public_include_directories": attrs.set(attrs.string(), sorted = True, default = []), "public_system_include_directories": attrs.set(attrs.string(), sorted = True, default = []), "raw_headers": attrs.set(attrs.source(), sorted = True, default = []), "supports_python_dlopen": attrs.bool(default = True), "versioned_header_dirs": attrs.option(attrs.versioned(attrs.list(attrs.source(allow_directory = True))), default = None), + "_create_third_party_build_root": attrs.default_only(attrs.exec_dep(default = "prelude//third-party/tools:create_build")), "_cxx_toolchain": toolchains_common.cxx(), "_target_os_type": buck.target_os_type_arg(), }, @@ -541,6 +601,7 @@ inlined_extra_attributes = { #python "prebuilt_python_library": { "_create_manifest_for_source_dir": _create_manifest_for_source_dir(), + "_create_third_party_build_root": attrs.default_only(attrs.exec_dep(default = "prelude//third-party/tools:create_build")), "_extract": attrs.default_only(attrs.exec_dep(default = "prelude//python/tools:extract")), "_python_toolchain": toolchains_common.python(), }, @@ -568,6 +629,7 @@ inlined_extra_attributes = { "python_library": { "resources": attrs.named_set(attrs.one_of(attrs.dep(), attrs.source(allow_directory = True)), sorted = True, default = []), "_create_manifest_for_source_dir": _create_manifest_for_source_dir(), + "_create_third_party_build_root": attrs.default_only(attrs.exec_dep(default = "prelude//third-party/tools:create_build")), "_cxx_toolchain": toolchains_common.cxx(), "_python_toolchain": toolchains_common.python(), }, @@ -577,7 +639,7 @@ inlined_extra_attributes = { labels = attrs.list(attrs.string(), default = []), needed_coverage = attrs.list(attrs.tuple(attrs.int(), attrs.dep(), attrs.option(attrs.string())), default = []), test = attrs.dep(providers = [ExternalRunnerTestInfo]), - **re_test_common.test_args() + **(re_test_common.test_args() | buck.inject_test_env_arg()) ), "python_test": _python_test_attrs(), "remote_file": { @@ -586,7 +648,7 @@ inlined_extra_attributes = { "_unzip_tool": attrs.default_only(attrs.exec_dep(providers = [RunInfo], default = "prelude//zip_file/tools:unzip")), }, "rust_test": {}, - "sh_test": {}, + "sh_test": _constraint_overrides_attr(), "windows_resource": { "_cxx_toolchain": toolchains_common.cxx(), }, @@ -606,43 +668,27 @@ all_extra_attributes = _merge_dictionaries([ _zip_file_extra_attributes, ]) -# Inject test toolchain in all tests. - -for rule in [ - "sh_test", - "rust_test", - "python_test", - "python_needed_coverage_test", - "java_test", - "go_test", - "cxx_test", - "apple_test", - "android_instrumentation_test", - "kotlin_test", - "robolectric_test", - "julia_test", -]: - # NOTE: We make this a `dep` not an `exec_dep` even though we'll execute - # it, because it needs to execute in the same platform as the test itself - # (we run tests in the target platform not the exec platform, since the - # goal is to test the code that is being built!). - all_extra_attributes[rule] = _merge_dictionaries([all_extra_attributes[rule], { - "_inject_test_env": attrs.default_only(attrs.dep(default = "prelude//test/tools:inject_test_env")), - }]) - extra_attributes = struct(**all_extra_attributes) # Configuration transitions to pass `cfg` for builtin rules. 
transitions = { "android_binary": constraint_overrides_transition, + "apple_asset_catalog": apple_resource_transition, + "apple_binary": target_sdk_version_transition, + "apple_bundle": target_sdk_version_transition, + "apple_library": target_sdk_version_transition, "apple_resource": apple_resource_transition, + "apple_test": target_sdk_version_transition, "cxx_binary": constraint_overrides_transition, "cxx_test": constraint_overrides_transition, "go_binary": go_binary_transition, "go_exported_library": go_exported_library_transition, + "go_library": go_library_transition, + "go_stdlib": go_stdlib_transition, "go_test": go_test_transition, "python_binary": constraint_overrides_transition, "python_test": constraint_overrides_transition, + "sh_test": constraint_overrides_transition, } toolchain_rule_names = [ diff --git a/prelude/runtime/BUCK.v2 b/prelude/runtime/BUCK.v2 new file mode 100644 index 00000000000..a4b3167a3e0 --- /dev/null +++ b/prelude/runtime/BUCK.v2 @@ -0,0 +1,12 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + +# Used by open source projects to support `prelude//` + +config_setting( + name = "fbcode", + visibility = ["PUBLIC"], +) diff --git a/prelude/rust/build.bzl b/prelude/rust/build.bzl index b903495bc08..2d312cb367d 100644 --- a/prelude/rust/build.bzl +++ b/prelude/rust/build.bzl @@ -7,7 +7,6 @@ load( "@prelude//:artifact_tset.bzl", - "ArtifactTSet", # @unused Used as a type "project_artifacts", ) load("@prelude//:local_only.bzl", "link_cxx_binary_locally") @@ -25,13 +24,15 @@ load("@prelude//cxx:debug.bzl", "SplitDebugMode") load("@prelude//cxx:dwp.bzl", "dwp", "dwp_available") load( "@prelude//cxx:linker.bzl", - "get_default_shared_library_name", "get_shared_library_name_linker_flags", ) load( "@prelude//linking:link_info.bzl", + "LibOutputStyle", # @unused Used as a type "LinkArgs", + "LinkInfos", # @unused Used as a type "LinkStrategy", # @unused Used as a type + "create_merged_link_info", "get_link_args_for_strategy", ) load( @@ -40,20 +41,23 @@ load( "traverse_shared_library_info", ) load("@prelude//linking:strip.bzl", "strip_debug_info") +load("@prelude//linking:types.bzl", "Linkage") load("@prelude//os_lookup:defs.bzl", "OsLookup") +load("@prelude//utils:argfile.bzl", "at_argfile") load("@prelude//utils:cmd_script.bzl", "ScriptOs", "cmd_script") -load("@prelude//utils:set.bzl", "set") load("@prelude//utils:utils.bzl", "flatten_dict") load( ":build_params.bzl", "BuildParams", # @unused Used as a type "CrateType", "Emit", + "MetadataKind", "crate_type_codegen", "crate_type_linked", - "emit_needs_codegen", + "dep_metadata_of_emit", "output_filename", ) +load(":clippy_configuration.bzl", "ClippyConfiguration") load( ":context.bzl", "CommonArgsInfo", @@ -61,10 +65,13 @@ load( "CrateName", # @unused Used as a type "DepCollectionContext", ) -load(":extern.bzl", "crate_map_arg", "extern_arg") +load( + ":extern.bzl", + "crate_map_arg", + "extern_arg", +) load( ":failure_filter.bzl", - "RustFailureFilter", "failure_filter", ) load( @@ -74,6 +81,7 @@ load( "RustLinkInfo", "attr_crate", "attr_simple_crate_for_filenames", + "attr_soname", "get_available_proc_macros", "inherited_external_debug_info", "inherited_merged_link_infos", @@ -83,21 +91,10 @@ load( "resolve_rust_deps", "strategy_info", ) +load(":outputs.bzl", "RustcOutput") load(":resources.bzl", "rust_attr_resources") load(":rust_toolchain.bzl", "PanicRuntime", "RustToolchainInfo") -RustcOutput = record( - output = field(Artifact), - stripped_output = 
field(Artifact), - diag = field(dict[str, Artifact]), - pdb = field([Artifact, None]), - dwp_output = field([Artifact, None]), - # Zero or more Split DWARF debug info files are emitted into this directory - # with unpredictable filenames. - dwo_output_directory = field([Artifact, None]), - extra_external_debug_info = field(list[ArtifactTSet]), -) - def compile_context(ctx: AnalysisContext) -> CompileContext: toolchain_info = ctx.attrs._rust_toolchain[RustToolchainInfo] cxx_toolchain_info = get_cxx_toolchain_info(ctx) @@ -143,6 +140,8 @@ def compile_context(ctx: AnalysisContext) -> CompileContext: empty_sysroot = ctx.actions.copied_dir("empty_sysroot", {"lib/rustlib/" + toolchain_info.rustc_target_triple + "/lib": empty_dir}) sysroot_args = cmd_args("--sysroot=", empty_sysroot, delimiter = "") + elif toolchain_info.sysroot_path: + sysroot_args = cmd_args("--sysroot=", toolchain_info.sysroot_path, delimiter = "") else: sysroot_args = cmd_args() @@ -176,16 +175,18 @@ def generate_rustdoc( dep_ctx = compile_ctx.dep_ctx, # to make sure we get the rmeta's generated for the crate dependencies, # rather than full .rlibs - emit = Emit("metadata"), + emit = Emit("metadata-fast"), params = params, default_roots = default_roots, + infallible_diagnostics = False, + incremental_enabled = False, is_rustdoc_test = False, ) subdir = common_args.subdir + "-rustdoc" output = ctx.actions.declare_output(subdir) - plain_env, path_env = _process_env(compile_ctx, ctx.attrs.env, exec_is_windows) + plain_env, path_env = process_env(compile_ctx, toolchain_info.rustdoc_env | ctx.attrs.env, exec_is_windows) plain_env["RUSTDOC_BUCK_TARGET"] = cmd_args(str(ctx.label.raw_target())) rustdoc_cmd = cmd_args( @@ -194,35 +195,12 @@ def generate_rustdoc( ctx.attrs.rustdoc_flags, common_args.args, cmd_args(output.as_output(), format = "--out-dir={}"), + hidden = [toolchain_info.rustdoc, compile_ctx.symlinked_srcs], ) if document_private_items: rustdoc_cmd.add("--document-private-items") - url_prefix = toolchain_info.extern_html_root_url_prefix - if url_prefix != None: - # Flag --extern-html-root-url used below is only supported on nightly. 
- plain_env["RUSTC_BOOTSTRAP"] = cmd_args("1") - rustdoc_cmd.add("-Zunstable-options") - - for dep in resolve_rust_deps(ctx, compile_ctx.dep_ctx): - if dep.label.cell != ctx.label.cell: - # TODO: support a different extern_html_root_url_prefix per cell - continue - - if dep.name: - name = normalize_crate(dep.name) - else: - # TODO: resolve this using dynamic (if set), see comment on D52476603 - name = dep.info.crate.simple - - rustdoc_cmd.add( - "--extern-html-root-url={}={}/{}:{}" - .format(name, url_prefix, dep.label.package, dep.label.name), - ) - - rustdoc_cmd.hidden(toolchain_info.rustdoc, compile_ctx.symlinked_srcs) - rustdoc_cmd_action = cmd_args( [cmd_args("--env=", k, "=", v, delimiter = "") for k, v in plain_env.items()], [cmd_args("--path-env=", k, "=", v, delimiter = "") for k, v in path_env.items()], @@ -255,9 +233,11 @@ def generate_rustdoc_coverage( dep_ctx = compile_ctx.dep_ctx, # to make sure we get the rmeta's generated for the crate dependencies, # rather than full .rlibs - emit = Emit("metadata"), + emit = Emit("metadata-fast"), params = params, default_roots = default_roots, + infallible_diagnostics = False, + incremental_enabled = False, is_rustdoc_test = False, ) @@ -274,7 +254,7 @@ def generate_rustdoc_coverage( ) exec_is_windows = ctx.attrs._exec_os_type[OsLookup].platform == "windows" - plain_env, path_env = _process_env(compile_ctx, ctx.attrs.env, exec_is_windows) + plain_env, path_env = process_env(compile_ctx, ctx.attrs.env, exec_is_windows) plain_env["RUSTDOC_BUCK_TARGET"] = cmd_args(str(ctx.label.raw_target())) rustdoc_cmd_action = cmd_args( @@ -299,8 +279,8 @@ def generate_rustdoc_coverage( def generate_rustdoc_test( ctx: AnalysisContext, compile_ctx: CompileContext, - link_strategy: LinkStrategy, rlib: Artifact, + link_infos: dict[LibOutputStyle, LinkInfos], params: BuildParams, default_roots: list[str]) -> cmd_args: exec_is_windows = ctx.attrs._exec_os_type[OsLookup].platform == "windows" @@ -326,14 +306,13 @@ def generate_rustdoc_test( ) # Gather and setup symlink tree of transitive shared library deps. - shared_libs = {} - if link_strategy == LinkStrategy("shared"): + shared_libs = [] + if params.dep_link_strategy == LinkStrategy("shared"): shlib_info = merge_shared_libraries( ctx.actions, deps = inherited_shared_libs(ctx, doc_dep_ctx), ) - for soname, shared_lib in traverse_shared_library_info(shlib_info).items(): - shared_libs[soname] = shared_lib.lib + shared_libs.extend(traverse_shared_library_info(shlib_info)) executable_args = executable_shared_lib_arguments( ctx, compile_ctx.cxx_toolchain_info, @@ -348,18 +327,30 @@ def generate_rustdoc_test( emit = Emit("link"), params = params, default_roots = default_roots, + infallible_diagnostics = False, is_rustdoc_test = True, + incremental_enabled = False, ) link_args_output = make_link_args( + ctx, ctx.actions, compile_ctx.cxx_toolchain_info, [ LinkArgs(flags = executable_args.extra_link_args), get_link_args_for_strategy( ctx, - inherited_merged_link_infos(ctx, doc_dep_ctx), - link_strategy, + # Since we pass the rlib in and treat it as a dependency to the rustdoc test harness, + # we need to ensure that the rlib's link info is added to the linker, otherwise we may + # end up with missing symbols that are defined within the crate. 
+ [create_merged_link_info( + ctx, + compile_ctx.cxx_toolchain_info.pic_behavior, + link_infos, + deps = inherited_merged_link_infos(ctx, doc_dep_ctx).values(), + preferred_linkage = Linkage("static"), + )] + inherited_merged_link_infos(ctx, doc_dep_ctx).values(), + params.dep_link_strategy, ), ], "{}-{}".format(common_args.subdir, common_args.tempfile), @@ -378,8 +369,8 @@ def generate_rustdoc_test( else: runtool = ["--runtool=/usr/bin/env"] - plain_env, path_env = _process_env(compile_ctx, ctx.attrs.env, exec_is_windows) - doc_plain_env, doc_path_env = _process_env(compile_ctx, ctx.attrs.doc_env, exec_is_windows) + plain_env, path_env = process_env(compile_ctx, ctx.attrs.env, exec_is_windows) + doc_plain_env, doc_path_env = process_env(compile_ctx, ctx.attrs.doc_env, exec_is_windows) for k, v in doc_plain_env.items(): path_env.pop(k, None) plain_env[k] = v @@ -403,19 +394,18 @@ def generate_rustdoc_test( common_args.args, extern_arg([], attr_crate(ctx), rlib), "--extern=proc_macro" if ctx.attrs.proc_macro else [], - compile_ctx.linker_args, + cmd_args(compile_ctx.linker_args, format = "-Clinker={}"), cmd_args(linker_argsfile, format = "-Clink-arg=@{}"), runtool, cmd_args(toolchain_info.rustdoc_test_with_resources, format = "--runtool-arg={}"), cmd_args("--runtool-arg=--resources=", resources, delimiter = ""), "--color=always", "--test-args=--color=always", - ) - - rustdoc_cmd.hidden( - compile_ctx.symlinked_srcs, - link_args_output.hidden, - executable_args.runtime_files, + hidden = [ + compile_ctx.symlinked_srcs, + link_args_output.hidden, + executable_args.runtime_files, + ], ) return _long_command( @@ -425,40 +415,6 @@ def generate_rustdoc_test( argfile_name = "{}.args".format(common_args.subdir), ) -# Generate multiple compile artifacts so that distinct sets of artifacts can be -# generated concurrently. -def rust_compile_multi( - ctx: AnalysisContext, - compile_ctx: CompileContext, - emits: list[Emit], - params: BuildParams, - default_roots: list[str], - extra_link_args: list[typing.Any] = [], - predeclared_outputs: dict[Emit, Artifact] = {}, - extra_flags: list[[str, ResolvedStringWithMacros]] = [], - is_binary: bool = False, - allow_cache_upload: bool = False, - rust_cxx_link_group_info: [RustCxxLinkGroupInfo, None] = None) -> list[RustcOutput]: - outputs = [] - - for emit in emits: - outs = rust_compile( - ctx = ctx, - compile_ctx = compile_ctx, - emit = emit, - params = params, - default_roots = default_roots, - extra_link_args = extra_link_args, - predeclared_outputs = predeclared_outputs, - extra_flags = extra_flags, - is_binary = is_binary, - allow_cache_upload = allow_cache_upload, - rust_cxx_link_group_info = rust_cxx_link_group_info, - ) - outputs.append(outs) - - return outputs - # Generate a compilation action. A single instance of rustc can emit # numerous output artifacts, so return an artifact object for each of # them. @@ -468,17 +424,32 @@ def rust_compile( emit: Emit, params: BuildParams, default_roots: list[str], + incremental_enabled: bool, extra_link_args: list[typing.Any] = [], - predeclared_outputs: dict[Emit, Artifact] = {}, + predeclared_output: Artifact | None = None, extra_flags: list[[str, ResolvedStringWithMacros]] = [], - is_binary: bool = False, allow_cache_upload: bool = False, + # Setting this to true causes the diagnostic outputs that are generated + # from this action to always be successfully generated, even if + # compilation fails. 
This should not generally be used if the "real" + # output of the action is going to be depended on. + infallible_diagnostics: bool = False, rust_cxx_link_group_info: [RustCxxLinkGroupInfo, None] = None) -> RustcOutput: exec_is_windows = ctx.attrs._exec_os_type[OsLookup].platform == "windows" toolchain_info = compile_ctx.toolchain_info - lints, clippy_lints = _lint_flags(compile_ctx) + lints = _lint_flags(compile_ctx, infallible_diagnostics, emit == Emit("clippy")) + + # If we are building metadata-full for a dylib target, we want the hollow-rlib version of rmeta, not the shared lib version. + if compile_ctx.dep_ctx.advanced_unstable_linking and emit == Emit("metadata-full") and params.crate_type == CrateType("dylib"): + params = BuildParams( + crate_type = CrateType("rlib"), + reloc_model = params.reloc_model, + dep_link_strategy = params.dep_link_strategy, + prefix = "lib", + suffix = ".rlib", + ) common_args = _compute_common_args( ctx = ctx, @@ -487,9 +458,45 @@ def _compute_common_args( emit = emit, params = params, default_roots = default_roots, + infallible_diagnostics = infallible_diagnostics, + incremental_enabled = incremental_enabled, is_rustdoc_test = False, ) + deferred_link_cmd = None + + # TODO(pickett): We can expand this to support all linked crate types (cdylib + binary) + # We can also share logic here for producing linked artifacts with cxx_library (instead of using + # deferred_link_action) + if params.crate_type == CrateType("dylib") and emit == Emit("link") and compile_ctx.dep_ctx.advanced_unstable_linking: + out_argsfile = ctx.actions.declare_output(common_args.subdir + "/extracted-link-args.args") + out_version_script = ctx.actions.declare_output(common_args.subdir + "/version-script") + out_objects_dir = ctx.actions.declare_output(common_args.subdir + "/objects", dir = True) + linker_cmd = cmd_args( + toolchain_info.extract_link_action, + cmd_args(out_argsfile.as_output(), format = "--out_argsfile={}"), + cmd_args(out_version_script.as_output(), format = "--out_version-script={}") if out_version_script else cmd_args(), + cmd_args(out_objects_dir.as_output(), format = "--out_objects={}"), + compile_ctx.linker_args, + ) + + linker_args = cmd_script( + ctx = ctx, + name = common_args.subdir + "/linker_wrapper", + cmd = linker_cmd, + os = ScriptOs("windows" if ctx.attrs._exec_os_type[OsLookup].platform == "windows" else "unix"), + ) + + deferred_link_cmd = cmd_args( + toolchain_info.deferred_link_action, + cmd_args(out_objects_dir, format = "--objects={}"), + cmd_args(out_version_script, format = "--version-script={}"), + compile_ctx.linker_args, + cmd_args(out_argsfile, format = "@{}"), + ) + else: + linker_args = compile_ctx.linker_args + path_sep = "\\" if exec_is_windows else "/" rustc_cmd = cmd_args( # Lints go first to allow other args to override them.
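To make the reworked `rust_compile` signature concrete, a minimal sketch of a call site under the new API follows. It is illustrative only and not part of the patch: `ctx`, `compile_ctx`, and `params` are assumed to come from the surrounding analysis code, and the crate root name is hypothetical.

    # Request a fast metadata build whose diagnostics are always produced,
    # even when compilation fails (hypothetical call site, not from this diff).
    meta = rust_compile(
        ctx = ctx,
        compile_ctx = compile_ctx,
        emit = Emit("metadata-fast"),
        params = params,
        default_roots = ["lib.rs"],
        incremental_enabled = False,
        infallible_diagnostics = True,
    )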
@@ -498,38 +505,60 @@ def rust_compile( ["--json=unused-externs-silent", "-Wunused-crate-dependencies"] if toolchain_info.report_unused_deps else [], common_args.args, cmd_args("--remap-path-prefix=", compile_ctx.symlinked_srcs, path_sep, "=", ctx.label.path, path_sep, delimiter = ""), - compile_ctx.linker_args, + ["-Zremap-cwd-prefix=."] if toolchain_info.nightly_features else [], + cmd_args(linker_args, format = "-Clinker={}"), extra_flags, ) + rustc_bin = compile_ctx.clippy_wrapper if emit == Emit("clippy") else toolchain_info.compiler + # If we're using failure filtering then we need to make sure the final # artifact location is the predeclared one since its specific path may have # already been encoded into the other compile args (eg rpath). So we still # let rustc_emit generate its own output artifacts, and then make sure we # use the predeclared one as the output after the failure filter action # below. Otherwise we'll use the predeclared outputs directly. - if toolchain_info.failure_filter: + if infallible_diagnostics: emit_op = _rustc_emit( ctx = ctx, - compile_ctx = compile_ctx, emit = emit, - predeclared_outputs = {}, subdir = common_args.subdir, params = params, + incremental_enabled = incremental_enabled, ) else: emit_op = _rustc_emit( ctx = ctx, - compile_ctx = compile_ctx, emit = emit, - predeclared_outputs = predeclared_outputs, subdir = common_args.subdir, params = params, + predeclared_output = predeclared_output, + incremental_enabled = incremental_enabled, + deferred_link = deferred_link_cmd != None, ) + if emit == Emit("clippy"): + clippy_toml = None + if ctx.attrs.clippy_configuration: + clippy_toml = ctx.attrs.clippy_configuration[ClippyConfiguration].clippy_toml + elif toolchain_info.clippy_toml: + clippy_toml = toolchain_info.clippy_toml + + if clippy_toml: + # Clippy wants to be given a path to a directory containing a + # clippy.toml (or .clippy.toml). Our buckconfig accepts an arbitrary + # label like //path/to:my-clippy.toml which may not have the + # filename that clippy looks for. Here we make a directory that + # symlinks the requested configuration file under the required name. + clippy_conf_dir = ctx.actions.symlinked_dir( + common_args.subdir + "-clippy-configuration", + {"clippy.toml": clippy_toml}, + ) + emit_op.env["CLIPPY_CONF_DIR"] = clippy_conf_dir + pdb_artifact = None dwp_inputs = [] - if crate_type_linked(params.crate_type) and not common_args.is_check: + if crate_type_linked(params.crate_type) and common_args.emit_requires_linking: subdir = common_args.subdir tempfile = common_args.tempfile @@ -547,11 +576,12 @@ def rust_compile( inherited_merged_link_infos( ctx, compile_ctx.dep_ctx, - ), + ).values(), params.dep_link_strategy, ) link_args_output = make_link_args( + ctx, ctx.actions, compile_ctx.cxx_toolchain_info, [ @@ -569,80 +599,48 @@ def rust_compile( pdb_artifact = link_args_output.pdb_artifact dwp_inputs = [link_args_output.link_args] - rustc_cmd.add(cmd_args(linker_argsfile, format = "-Clink-arg=@{}")) - rustc_cmd.hidden(link_args_output.hidden) - (diag, build_status) = _rustc_invoke( + # If we are deferring the real link to a separate action, we no longer pass the linker + # argsfile to rustc. This allows the rustc action to complete with only transitive dep rmeta. + if deferred_link_cmd != None: + deferred_link_cmd.add(cmd_args(linker_argsfile, format = "@{}")) + deferred_link_cmd.add(cmd_args(hidden = link_args_output.hidden)) + + # The -o flag passed to the linker by rustc is a temporary file. 
So we will strip it + # out in `extract_link_action.py` and provide our own output path here. + deferred_link_cmd.add(cmd_args(emit_op.output.as_output(), format = "-o {}")) + else: + rustc_cmd.add(cmd_args(linker_argsfile, format = "-Clink-arg=@{}")) + rustc_cmd.add(cmd_args(hidden = link_args_output.hidden)) + + invoke = _rustc_invoke( ctx = ctx, compile_ctx = compile_ctx, + common_args = common_args, prefix = "{}/{}".format(common_args.subdir, common_args.tempfile), - rustc_cmd = cmd_args(toolchain_info.compiler, rustc_cmd, emit_op.args), - diag = "diag", + rustc_cmd = cmd_args(rustc_bin, rustc_cmd, emit_op.args), required_outputs = [emit_op.output], - short_cmd = common_args.short_cmd, - is_binary = is_binary, - allow_cache_upload = allow_cache_upload, + is_clippy = emit.value == "clippy", + infallible_diagnostics = infallible_diagnostics, + allow_cache_upload = allow_cache_upload and emit != Emit("clippy"), crate_map = common_args.crate_map, env = emit_op.env, + incremental_enabled = incremental_enabled, + deferred_link_cmd = deferred_link_cmd, ) - # Add clippy diagnostic targets for check builds - if common_args.is_check: - # We don't really need the outputs from this build, just to keep the artifact accounting straight - clippy_emit_op = _rustc_emit( - ctx = ctx, - compile_ctx = compile_ctx, - emit = emit, - predeclared_outputs = {}, - subdir = common_args.subdir + "-clippy", - params = params, - ) - clippy_env = clippy_emit_op.env - if toolchain_info.clippy_toml: - # Clippy wants to be given a path to a directory containing a - # clippy.toml (or .clippy.toml). Our buckconfig accepts an arbitrary - # label like //path/to:my-clippy.toml which may not have the - # filename that clippy looks for. Here we make a directory that - # symlinks the requested configuration file under the required name. - clippy_conf_dir = ctx.actions.symlinked_dir( - common_args.subdir + "-clippy-configuration", - {"clippy.toml": toolchain_info.clippy_toml}, - ) - clippy_env["CLIPPY_CONF_DIR"] = clippy_conf_dir - (clippy_diag, _) = _rustc_invoke( - ctx = ctx, - compile_ctx = compile_ctx, - prefix = "{}/{}".format(common_args.subdir, common_args.tempfile), - # Lints go first to allow other args to override them. - rustc_cmd = cmd_args(compile_ctx.clippy_wrapper, clippy_lints, rustc_cmd, clippy_emit_op.args), - env = clippy_env, - diag = "clippy", - required_outputs = [clippy_emit_op.output], - short_cmd = common_args.short_cmd, - is_binary = False, - allow_cache_upload = False, - crate_map = common_args.crate_map, - ) - diag.update(clippy_diag) - - if toolchain_info.failure_filter: + if infallible_diagnostics and emit != Emit("clippy"): # This is only needed when this action's output is being used as an # input, so we only need standard diagnostics (clippy is always # asked for explicitly). 
- stderr = diag["diag.txt"] - filter_prov = RustFailureFilter( - buildstatus = build_status, - required = emit_op.output, - stderr = stderr, - ) - filtered_output = failure_filter( ctx = ctx, compile_ctx = compile_ctx, - prefix = "{}/{}".format(common_args.subdir, emit.value), - predecl_out = predeclared_outputs.get(emit), - failprov = filter_prov, - short_cmd = common_args.short_cmd, + predeclared_output = predeclared_output, + build_status = invoke.build_status, + required = emit_op.output, + stderr = invoke.diag_txt, + identifier = invoke.identifier, ) else: filtered_output = emit_op.output @@ -676,7 +674,9 @@ def rust_compile( dwo_output_directory = None extra_external_debug_info = [] - if is_binary and dwp_available(compile_ctx.cxx_toolchain_info): + if params.crate_type == CrateType("bin") and \ + emit == Emit("link") and \ + dwp_available(compile_ctx.cxx_toolchain_info): dwp_output = dwp( ctx, compile_ctx.cxx_toolchain_info, @@ -705,7 +705,8 @@ def rust_compile( return RustcOutput( output = filtered_output, stripped_output = stripped_output, - diag = diag, + diag_txt = invoke.diag_txt, + diag_json = invoke.diag_json, pdb = pdb_artifact, dwp_output = dwp_output, dwo_output_directory = dwo_output_directory, @@ -715,18 +716,22 @@ def rust_compile( # --extern = for direct dependencies # -Ldependency= for transitive dependencies # For native dependencies, we use -Clink-arg=@argsfile -# Second element of result tuple is a list of files/directories that should be present for executable to be run successfully -# Third return is the mapping from crate names back to targets (needed so that a deps linter knows what deps need fixing) # -# The `compile_ctx` may be omitted if `is_check` is `True` and there are no dependencies with dynamic crate names +# Second element of returned tuple is a mapping from crate names back to target +# label, needed for applying autofixes for rustc's unused_crate_dependencies +# lint by tracing Rust crate names in the compiler diagnostic back to which +# dependency entry in the BUCK file needs to be removed. +# +# The `compile_ctx` may be omitted if there are no dependencies with dynamic +# crate names. def dependency_args( ctx: AnalysisContext, compile_ctx: CompileContext | None, + toolchain_info: RustToolchainInfo, deps: list[RustDependency], subdir: str, - crate_type: CrateType, dep_link_strategy: LinkStrategy, - is_check: bool, + dep_metadata_kind: MetadataKind, is_rustdoc_test: bool) -> (cmd_args, list[(CrateName, Label)]): args = cmd_args() transitive_deps = {} @@ -741,44 +746,24 @@ def dependency_args( else: crate = dep.info.crate - strategy = strategy_info(dep.info, dep_link_strategy) + strategy = strategy_info(toolchain_info, dep.info, dep_link_strategy) - # With `advanced_unstable_linking`, we unconditionally pass the metadata - # artifacts. There are two things that work together to make this possible - # in the case of binaries: - # - # 1. The actual rlibs appear in the link providers, so they'll still be - # available for the linker to link in - # 2. The metadata artifacts aren't rmetas, but rather rlibs that just - # don't contain any generated code. Rustc can't distinguish these - # from real rlibs, and so doesn't throw an error - # - # The benefit of doing this is that there's no requirment that the - # dependency's generated code be provided to the linker via an rlib. 
It - # could be provided by other means, say, a link group - use_rmeta = is_check or compile_ctx.dep_ctx.advanced_unstable_linking or (compile_ctx.toolchain_info.pipelined and not crate_type_codegen(crate_type) and not is_rustdoc_test) - - # Use rmeta dependencies whenever possible because they - # should be cheaper to produce. - if use_rmeta: - artifact = strategy.rmeta - transitive_artifacts = strategy.transitive_rmeta_deps - else: - artifact = strategy.rlib - transitive_artifacts = strategy.transitive_deps + artifact = strategy.outputs[dep_metadata_kind] + transitive_artifacts = strategy.transitive_deps[dep_metadata_kind] for marker in strategy.transitive_proc_macro_deps.keys(): info = available_proc_macros[marker.label][RustLinkInfo] - strategy = strategy_info(info, dep_link_strategy) - transitive_deps[strategy.rmeta if use_rmeta else strategy.rlib] = info.crate + strategy = strategy_info(toolchain_info, info, dep_link_strategy) + transitive_deps[strategy.outputs[MetadataKind("link")]] = info.crate args.add(extern_arg(dep.flags, crate, artifact)) crate_targets.append((crate, dep.label)) - # Because deps of this *target* can also be transitive deps of this compiler - # invocation, pass the artifact through `-L` unconditionally for doc tests. + # Because deps of this *target* can also be transitive deps of this + # compiler invocation, pass the artifact (under its original crate name) + # through `-L` unconditionally for doc tests. if is_rustdoc_test: - transitive_deps[artifact] = crate + transitive_deps[artifact] = dep.info.crate # Unwanted transitive_deps have already been excluded transitive_deps.update(transitive_artifacts) @@ -791,7 +776,7 @@ def dependency_args( else: simple_artifacts[artifact] = None - prefix = "{}-deps{}".format(subdir, "-check" if is_check else "") + prefix = "{}-deps{}".format(subdir, dep_metadata_kind.value) if simple_artifacts: args.add(simple_symlinked_dirs(ctx, prefix, simple_artifacts)) if dynamic_artifacts: @@ -829,9 +814,12 @@ def dynamic_symlinked_dirs( # Pass the list of rlibs to transitive_dependency_symlinks.py through a file # because there can be a lot of them. This avoids running out of command # line length, particularly on Windows. 
- relative_path = lambda artifact: (cmd_args(artifact, delimiter = "") - .relative_to(transitive_dependency_dir.project("i")) - .ignore_artifacts()) + relative_path = lambda artifact: cmd_args( + artifact, + delimiter = "", + ignore_artifacts = True, + relative_to = transitive_dependency_dir.project("i"), + ) artifacts_json = ctx.actions.write_json( ctx.actions.declare_output("{}-dyn.json".format(prefix)), [ @@ -848,12 +836,12 @@ def dynamic_symlinked_dirs( cmd_args(transitive_dependency_dir.as_output(), format = "--out-dir={}"), cmd_args(artifacts_json, format = "--artifacts={}"), ], - category = "tdep_symlinks", + category = "deps", identifier = str(len(compile_ctx.transitive_dependency_dirs)), ) compile_ctx.transitive_dependency_dirs[transitive_dependency_dir] = None - return cmd_args(transitive_dependency_dir, format = "@{}/dirs").hidden(artifacts.keys()) + return cmd_args(transitive_dependency_dir, format = "@{}/dirs", hidden = artifacts.keys()) def _lintify(flag: str, clippy: bool, lints: list[ResolvedStringWithMacros]) -> cmd_args: return cmd_args( @@ -861,23 +849,16 @@ def _lintify(flag: str, clippy: bool, lints: list[ResolvedStringWithMacros]) -> format = "-{}{{}}".format(flag), ) -def _lint_flags(compile_ctx: CompileContext) -> (cmd_args, cmd_args): +def _lint_flags(compile_ctx: CompileContext, infallible_diagnostics: bool, is_clippy: bool) -> cmd_args: toolchain_info = compile_ctx.toolchain_info - plain = cmd_args( - _lintify("A", False, toolchain_info.allow_lints), - _lintify("D", False, toolchain_info.deny_lints), - _lintify("W", False, toolchain_info.warn_lints), - ) - - clippy = cmd_args( - _lintify("A", True, toolchain_info.allow_lints), - _lintify("D", True, toolchain_info.deny_lints), - _lintify("W", True, toolchain_info.warn_lints), + return cmd_args( + _lintify("A", is_clippy, toolchain_info.allow_lints), + _lintify("D", is_clippy, toolchain_info.deny_lints), + _lintify("D" if infallible_diagnostics else "W", is_clippy, toolchain_info.deny_on_check_lints), + _lintify("W", is_clippy, toolchain_info.warn_lints), ) - return (plain, clippy) - def _rustc_flags(flags: list[[str, ResolvedStringWithMacros]]) -> list[[str, ResolvedStringWithMacros]]: # Rustc's "-g" flag is documented as being exactly equivalent to # "-Cdebuginfo=2". 
Rustdoc supports the latter, it just doesn't have the @@ -896,13 +877,15 @@ def _compute_common_args( emit: Emit, params: BuildParams, default_roots: list[str], + infallible_diagnostics: bool, + incremental_enabled: bool, is_rustdoc_test: bool) -> CommonArgsInfo: exec_is_windows = ctx.attrs._exec_os_type[OsLookup].platform == "windows" path_sep = "\\" if exec_is_windows else "/" crate_type = params.crate_type - args_key = (crate_type, emit, params.dep_link_strategy, is_rustdoc_test) + args_key = (crate_type, emit, params.dep_link_strategy, is_rustdoc_test, infallible_diagnostics, incremental_enabled) if args_key in compile_ctx.common_args: return compile_ctx.common_args[args_key] @@ -910,36 +893,56 @@ def _compute_common_args( subdir = "{}-{}-{}-{}".format(crate_type.value, params.reloc_model.value, params.dep_link_strategy.value, emit.value) if is_rustdoc_test: subdir = "{}-rustdoc-test".format(subdir) + if infallible_diagnostics: + subdir = "{}-diag".format(subdir) + if incremental_enabled: + subdir = "{}-incr".format(subdir) # Included in tempfiles tempfile = "{}-{}".format(attr_simple_crate_for_filenames(ctx), emit.value) - srcs = ctx.attrs.srcs - mapped_srcs = ctx.attrs.mapped_srcs - all_srcs = map(lambda s: s.short_path, srcs) + mapped_srcs.values() - crate_root = ctx.attrs.crate_root or _crate_root(ctx, all_srcs, default_roots) + root = crate_root(ctx, default_roots) if exec_is_windows: - crate_root = crate_root.replace("/", "\\") + root = root.replace("/", "\\") - is_check = not emit_needs_codegen(emit) + # With `advanced_unstable_linking`, we unconditionally pass the metadata + # artifacts. There are two things that work together to make this possible + # in the case of binaries: + # + # 1. The actual rlibs appear in the link providers, so they'll still be + # available for the linker to link in + # 2. The metadata artifacts aren't rmetas, but rather rlibs that just + # don't contain any generated code. Rustc can't distinguish these + # from real rlibs, and so doesn't throw an error + # + # The benefit of doing this is that there's no requirement that the + # dependency's generated code be provided to the linker via an rlib. 
It + # could be provided by other means, say, a link group + dep_metadata_kind = dep_metadata_of_emit(emit) + + # FIXME(JakobDegen): This computation is an awfully broad over-approximation + emit_requires_linking = dep_metadata_kind == MetadataKind("link") + if compile_ctx.dep_ctx.advanced_unstable_linking or not crate_type_codegen(crate_type): + if dep_metadata_kind == MetadataKind("link"): + dep_metadata_kind = MetadataKind("full") dep_args, crate_map = dependency_args( ctx = ctx, compile_ctx = compile_ctx, + toolchain_info = compile_ctx.toolchain_info, deps = resolve_rust_deps(ctx, dep_ctx), subdir = subdir, - crate_type = crate_type, dep_link_strategy = params.dep_link_strategy, - is_check = is_check, + dep_metadata_kind = dep_metadata_kind, is_rustdoc_test = is_rustdoc_test, ) if crate_type == CrateType("proc-macro"): dep_args.add("--extern=proc_macro") - if crate_type in [CrateType("cdylib"), CrateType("dylib")] and not is_check: + if crate_type in [CrateType("cdylib"), CrateType("dylib")] and emit_requires_linking: linker_info = compile_ctx.cxx_toolchain_info.linker_info - shlib_name = get_default_shared_library_name(linker_info, ctx.label) + shlib_name = attr_soname(ctx) dep_args.add(cmd_args( get_shared_library_name_linker_flags(linker_info.type, shlib_name), format = "-Clink-arg={}", @@ -955,6 +958,46 @@ def _compute_common_args( else: crate_name_arg = cmd_args("--crate-name=", crate.simple, delimiter = "") + # The `-Cprefer-dynamic` flag controls rustc's choice of artifacts for + # transitive dependencies, both for loading metadata and linking them. + # Direct dependencies are given to rustc one-by-one using `--extern` with a + # path to a specific artifact, so there is never ambiguity what artifact to + # use for a direct dependency. But transitive dependencies are passed in + # bulk via zero or more `-Ldependency` flags, which are directories + # containing artifacts. Within those directories, information about a + # specific crate might be available from more than one artifact, such as a + # dylib and rlib for the same crate. + # + # With `-Cprefer-dynamic=no` (the default), when a transitive dependency + # exists as both rlib and dylib, metadata is loaded from the rlib. If some + # dependencies are available in dylib but not rlib, the dylib is used for + # those. With `-Cprefer-dynamic=yes`, when a transitive dependency exists as + # both rlib and dylib, instead the dylib is used. + # + # The ambiguity over whether to use rlib or dylib for a particular + # transitive dependency only occurs if the rlib and dylib both describe the + # same crate i.e. contain the same crate hash. + # + # Buck-built libraries never produce an rlib and dylib containing the same + # crate hash, since that only occurs when outputting multiple crate types + # through a single rustc invocation: `--crate-type=rlib --crate-type=dylib`. + # In Buck, different crate types are built by different rustc invocations. + # But Cargo does invoke rustc with multiple crate types when you write + # `[lib] crate-type = ["rlib", "dylib"]` in Cargo.toml, and in fact the + # standard libraries built by x.py and distributed by Rustup are built this + # way. + if toolchain_info.explicit_sysroot_deps: + # Standard libraries are being passed explicitly, and Buck-built + # dependencies never collide on crate hash, so `-Cprefer-dynamic` cannot + # make a difference. 
+ prefer_dynamic_flags = [] + elif crate_type == CrateType("dylib") and toolchain_info.advanced_unstable_linking: + # Use standard library dylibs from the implicit sysroot. + prefer_dynamic_flags = ["-Cprefer-dynamic=yes"] + else: + # Use standard library rlibs from the implicit sysroot. + prefer_dynamic_flags = ["-Cprefer-dynamic=no"] # (the default) + split_debuginfo_flags = { # Rustc's default behavior: debug info is put into every rlib and # staticlib, then copied into the executables and shared libraries by @@ -998,7 +1041,7 @@ def _compute_common_args( }[compile_ctx.cxx_toolchain_info.split_debug_mode or SplitDebugMode("none")] args = cmd_args( - cmd_args(compile_ctx.symlinked_srcs, path_sep, crate_root, delimiter = ""), + cmd_args(compile_ctx.symlinked_srcs, path_sep, root, delimiter = ""), crate_name_arg, "--crate-type={}".format(crate_type.value), "-Crelocation-model={}".format(params.reloc_model.value), @@ -1006,15 +1049,18 @@ def _compute_common_args( "-Cmetadata={}".format(_metadata(ctx.label, is_rustdoc_test)[0]), # Make diagnostics json with the option to extract rendered text ["--error-format=json", "--json=diagnostic-rendered-ansi"] if not is_rustdoc_test else [], - ["-Cprefer-dynamic=yes"] if crate_type == CrateType("dylib") else [], + prefer_dynamic_flags, ["--target={}".format(toolchain_info.rustc_target_triple)] if toolchain_info.rustc_target_triple else [], split_debuginfo_flags, compile_ctx.sysroot_args, ["-Cpanic=abort", "-Zpanic-abort-tests=yes"] if toolchain_info.panic_runtime == PanicRuntime("abort") else [], _rustc_flags(toolchain_info.rustc_flags), - _rustc_flags(toolchain_info.rustc_check_flags) if is_check else [], + # `rustc_check_flags` is specifically interpreted as flags that are used + # only on the metadata-fast graph. 
+ _rustc_flags(toolchain_info.rustc_check_flags) if dep_metadata_kind == MetadataKind("fast") else [], _rustc_flags(toolchain_info.rustc_coverage_flags) if ctx.attrs.coverage else [], _rustc_flags(ctx.attrs.rustc_flags), + _rustc_flags(toolchain_info.extra_rustc_flags), cmd_args(ctx.attrs.features, format = '--cfg=feature="{}"'), dep_args, ) @@ -1023,8 +1069,10 @@ def _compute_common_args( args = args, subdir = subdir, tempfile = tempfile, - short_cmd = "{},{},{}".format(crate_type.value, params.reloc_model.value, emit.value), - is_check = is_check, + crate_type = crate_type, + params = params, + emit = emit, + emit_requires_linking = emit_requires_linking, crate_map = crate_map, ) @@ -1044,7 +1092,7 @@ def _clippy_wrapper( if toolchain_info.rustc_target_triple: rustc_print_sysroot.add("--target={}".format(toolchain_info.rustc_target_triple)) - skip_setting_sysroot = toolchain_info.explicit_sysroot_deps != None + skip_setting_sysroot = toolchain_info.explicit_sysroot_deps != None or toolchain_info.sysroot_path != None if ctx.attrs._exec_os_type[OsLookup].platform == "windows": wrapper_file, _ = ctx.actions.write( @@ -1074,7 +1122,7 @@ def _clippy_wrapper( allow_args = True, ) - return cmd_args(wrapper_file).hidden(clippy_driver, rustc_print_sysroot) + return cmd_args(wrapper_file, hidden = [clippy_driver, rustc_print_sysroot]) # This is a hack because we need to pass the linker to rustc # using -Clinker=path and there is currently no way of doing this @@ -1089,15 +1137,13 @@ def _linker_args( ctx.attrs.linker_flags, ) - linker_wrapper = cmd_script( + return cmd_script( ctx = ctx, name = "linker_wrapper", cmd = linker, os = ScriptOs("windows" if ctx.attrs._exec_os_type[OsLookup].platform == "windows" else "unix"), ) - return cmd_args(linker_wrapper, format = "-Clinker={}") - # Returns the full label and its hash. The full label is used for `-Cmetadata` # which provided the primary disambiguator for two otherwise identically named # crates. The hash is added to the filename to give them a lower likelihood of @@ -1112,10 +1158,14 @@ def _metadata(label: Label, is_rustdoc_test: bool) -> (str, str): h = "%x" % h return (label, "0" * (8 - len(h)) + h) -def _crate_root( +def crate_root( ctx: AnalysisContext, - srcs: list[str], default_roots: list[str]) -> str: + if ctx.attrs.crate_root: + return ctx.attrs.crate_root + + srcs = [s.short_path for s in ctx.attrs.srcs] + ctx.attrs.mapped_srcs.values() + candidates = set() if getattr(ctx.attrs, "crate_dynamic", None): crate_with_suffix = None @@ -1126,10 +1176,56 @@ def _crate_root( if filename in default_roots or filename == crate_with_suffix: candidates.add(src) - if candidates.size() == 1: - return candidates.list()[0] + if len(candidates) == 1: + return candidates.pop() + + fail("Could not infer crate_root." + + "\nMake sure you have one of {} in your `srcs` attribute.".format(default_roots) + + "\nOr add 'crate_root = \"src/example.rs\"' to your attributes to disambiguate. 
candidates={}".format(candidates)) + +def _explain(crate_type: CrateType, link_strategy: LinkStrategy, emit: Emit, infallible_diagnostics: bool) -> str: + if emit == Emit("metadata-full"): + link_strategy_suffix = { + LinkStrategy("static"): " [static]", + LinkStrategy("static_pic"): " [pic]", + LinkStrategy("shared"): " [shared]", + }[link_strategy] + return "metadata" + link_strategy_suffix + + if emit == Emit("metadata-fast"): + return "diag" if infallible_diagnostics else "check" + + if emit == Emit("link"): + link_strategy_suffix = { + LinkStrategy("static"): "", + LinkStrategy("static_pic"): " [pic]", + LinkStrategy("shared"): " [shared]", + }[link_strategy] + if crate_type == CrateType("bin"): + return "link" + link_strategy_suffix + if crate_type == CrateType("rlib"): + return "rlib" + link_strategy_suffix + if crate_type == CrateType("dylib"): + return "dylib" + link_strategy_suffix + if crate_type == CrateType("proc-macro"): + return "proc-macro" # always static_pic + if crate_type == CrateType("cdylib"): + return "cdylib" + link_strategy_suffix + if crate_type == CrateType("staticlib"): + return "staticlib" + link_strategy_suffix + + if emit == Emit("expand"): + return "expand" - fail("Could not infer crate_root. candidates=%s\nAdd 'crate_root = \"src/example.rs\"' to your attributes to disambiguate." % candidates.list()) + if emit == Emit("llvm-ir"): + link_strategy_suffix = { + LinkStrategy("static"): " [static]", + LinkStrategy("static_pic"): " [pic]", + LinkStrategy("shared"): " [shared]", + }[link_strategy] + return "llvm-ir" + link_strategy_suffix + + fail("unrecognized rustc action:", crate_type, link_strategy, emit) EmitOperation = record( output = field(Artifact), @@ -1141,40 +1237,25 @@ EmitOperation = record( # Take a desired output and work out how to convince rustc to generate it def _rustc_emit( ctx: AnalysisContext, - compile_ctx: CompileContext, emit: Emit, - predeclared_outputs: dict[Emit, Artifact], subdir: str, - params: BuildParams) -> EmitOperation: - toolchain_info = compile_ctx.toolchain_info + params: BuildParams, + incremental_enabled: bool, + predeclared_output: Artifact | None = None, + deferred_link: bool = False) -> EmitOperation: simple_crate = attr_simple_crate_for_filenames(ctx) crate_type = params.crate_type - # Metadata for pipelining needs has enough info to be used as an input - # for dependents. To do this reliably, we actually emit "link" but - # suppress actual codegen with -Zno-codegen. - # - # We don't bother to do this with "codegen" crates - ie, ones which are - # linked into an artifact like binaries and dylib, since they're not - # used as a pipelined dependency input. 
- pipeline_meta = emit == Emit("metadata") and \ - toolchain_info.pipelined and \ - not crate_type_codegen(crate_type) - emit_args = cmd_args() emit_env = {} extra_out = None - if emit in predeclared_outputs: - emit_output = predeclared_outputs[emit] + if predeclared_output: + emit_output = predeclared_output else: extra_hash = "-" + _metadata(ctx.label, False)[1] emit_args.add("-Cextra-filename={}".format(extra_hash)) - if pipeline_meta: - # Make sure hollow rlibs are distinct from real ones - filename = subdir + "/hollow/" + output_filename(simple_crate, Emit("link"), params, extra_hash) - else: - filename = subdir + "/" + output_filename(simple_crate, emit, params, extra_hash) + filename = subdir + "/" + output_filename(simple_crate, emit, params, extra_hash) emit_output = ctx.actions.declare_output(filename) @@ -1185,27 +1266,40 @@ def _rustc_emit( cmd_args(emit_output.as_output(), format = "-o{}"), ) else: - if toolchain_info.pipelined: - # Even though the unstable flag only appears on one of the branches, we need - # an identical environment between the `-Zno-codegen` and non-`-Zno-codegen` - # command or else there are "found possibly newer version of crate" errors. - emit_env["RUSTC_BOOTSTRAP"] = "1" - - if pipeline_meta: - # If we're doing a pipelined build, instead of emitting an actual rmeta - # we emit a "hollow" .rlib - ie, it only contains lib.rmeta and no object - # code. It should contain full information needed by any dependent - # crate which is generating code (MIR, etc). - # - # IMPORTANT: this flag is the only way that the Emit("metadata") and - # Emit("link") operations are allowed to diverge without causing them to - # get different crate hashes. - emit_args.add("-Zno-codegen") - effective_emit = Emit("link") + # Even though the unstable flag only appears on one of the branches, we need + # an identical environment between the `-Zno-codegen` and non-`-Zno-codegen` + # command or else there are "found possibly newer version of crate" errors. + emit_env["RUSTC_BOOTSTRAP"] = "1" + + if emit == Emit("metadata-full"): + if crate_type_codegen(crate_type): + # We don't ever have metadata-only deps on codegen crates, so we can + # fall back to the `metadata-fast` behavior. Normally though, this + # artifact should be unused and so this shouldn't matter. + effective_emit = "metadata" + else: + # As we're doing a pipelined build, instead of emitting an actual rmeta + # we emit a "hollow" .rlib - ie, it only contains lib.rmeta and no object + # code. It should contain full information needed by any dependent + # crate which is generating code (MIR, etc). + # + # IMPORTANT: this flag is the only way that the Emit("metadata") and + # Emit("link") operations are allowed to diverge without causing them to + # get different crate hashes. + emit_args.add("-Zno-codegen") + effective_emit = "link" + elif emit == Emit("metadata-fast") or emit == Emit("clippy"): + effective_emit = "metadata" else: - effective_emit = emit + effective_emit = emit.value - emit_args.add(cmd_args("--emit=", effective_emit.value, "=", emit_output.as_output(), delimiter = "")) + # When using deferred link, we still want to pass `--emit` to rustc to trigger + # the correct compilation behavior, but we do not want to pass emit_output here. + # Instead, we will bind the emit output to the actual deferred link action. 
+ if deferred_link and effective_emit == "link": + emit_args.add(cmd_args("--emit=", effective_emit, delimiter = "")) + else: + emit_args.add(cmd_args("--emit=", effective_emit, "=", emit_output.as_output(), delimiter = "")) # Strip file extension from directory name. base, _ext = paths.split_extension(output_filename(simple_crate, emit, params)) @@ -1213,7 +1307,7 @@ def _rustc_emit( extra_out = ctx.actions.declare_output(extra_dir, dir = True) emit_args.add(cmd_args(extra_out.as_output(), format = "--out-dir={}")) - if ctx.attrs.incremental_enabled: + if incremental_enabled: build_mode = ctx.attrs.incremental_build_mode incremental_out = ctx.actions.declare_output("{}/extras/incremental/{}".format(subdir, build_mode)) incremental_cmd = cmd_args(incremental_out.as_output(), format = "-Cincremental={}") @@ -1226,38 +1320,49 @@ def _rustc_emit( extra_out = extra_out, ) +Invoke = record( + diag_txt = field(Artifact), + diag_json = field(Artifact), + build_status = field(Artifact | None), + identifier = field([str, None]), +) + # Invoke rustc and capture outputs def _rustc_invoke( ctx: AnalysisContext, compile_ctx: CompileContext, + common_args: CommonArgsInfo, prefix: str, rustc_cmd: cmd_args, - diag: str, required_outputs: list[Artifact], - short_cmd: str, - is_binary: bool, + is_clippy: bool, + infallible_diagnostics: bool, allow_cache_upload: bool, + incremental_enabled: bool, crate_map: list[(CrateName, Label)], - env: dict[str, str | ResolvedStringWithMacros | Artifact]) -> (dict[str, Artifact], [Artifact, None]): + env: dict[str, str | ResolvedStringWithMacros | Artifact], + deferred_link_cmd: cmd_args | None) -> Invoke: exec_is_windows = ctx.attrs._exec_os_type[OsLookup].platform == "windows" toolchain_info = compile_ctx.toolchain_info - plain_env, path_env = _process_env(compile_ctx, ctx.attrs.env, exec_is_windows) + plain_env, path_env = process_env(compile_ctx, ctx.attrs.env, exec_is_windows) - more_plain_env, more_path_env = _process_env(compile_ctx, env, exec_is_windows) + more_plain_env, more_path_env = process_env(compile_ctx, env, exec_is_windows) plain_env.update(more_plain_env) path_env.update(more_path_env) # Save diagnostic outputs - json_diag = ctx.actions.declare_output("{}-{}.json".format(prefix, diag)) - txt_diag = ctx.actions.declare_output("{}-{}.txt".format(prefix, diag)) + diag = "clippy" if is_clippy else "diag" + diag_json = ctx.actions.declare_output("{}-{}.json".format(prefix, diag)) + diag_txt = ctx.actions.declare_output("{}-{}.txt".format(prefix, diag)) compile_cmd = cmd_args( - cmd_args(json_diag.as_output(), format = "--diag-json={}"), - cmd_args(txt_diag.as_output(), format = "--diag-txt={}"), - "--remap-cwd-prefix=.", + cmd_args(diag_json.as_output(), format = "--diag-json={}"), + cmd_args(diag_txt.as_output(), format = "--diag-txt={}"), + ["--remap-cwd-prefix=."] if not toolchain_info.nightly_features else [], "--buck-target={}".format(ctx.label.raw_target()), + hidden = [toolchain_info.compiler, compile_ctx.symlinked_srcs], ) for k, v in crate_map: @@ -1268,7 +1373,7 @@ def _rustc_invoke( compile_cmd.add(cmd_args("--path-env=", k, "=", v, delimiter = "")) build_status = None - if toolchain_info.failure_filter: + if infallible_diagnostics: # Build status for fail filter build_status = ctx.actions.declare_output("{}_build_status-{}.json".format(prefix, diag)) compile_cmd.add(cmd_args(build_status.as_output(), format = "--failure-filter={}")) @@ -1276,7 +1381,6 @@ def _rustc_invoke( compile_cmd.add("--required-output", out.short_path, out.as_output()) 
compile_cmd.add(rustc_cmd) - compile_cmd.hidden(toolchain_info.compiler, compile_ctx.symlinked_srcs) compile_cmd = _long_command( ctx = ctx, @@ -1285,26 +1389,59 @@ def _rustc_invoke( argfile_name = "{}-{}.args".format(prefix, diag), ) - incremental_enabled = ctx.attrs.incremental_enabled local_only = False prefer_local = False if incremental_enabled: local_only = True - elif is_binary and link_cxx_binary_locally(ctx): + elif common_args.crate_type == CrateType("bin") and \ + common_args.emit == Emit("link") and \ + link_cxx_binary_locally(ctx): prefer_local = True - identifier = "{} {} [{}]".format(prefix, short_cmd, diag) + if is_clippy: + category = "clippy" + identifier = "" + else: + category = "rustc" + identifier = _explain( + crate_type = common_args.crate_type, + link_strategy = common_args.params.dep_link_strategy, + emit = common_args.emit, + infallible_diagnostics = infallible_diagnostics, + ) + + if incremental_enabled: + if not identifier.endswith("]"): + identifier += " " + identifier += "[incr]" + ctx.actions.run( compile_cmd, local_only = local_only, - prefer_local = prefer_local, - category = "rustc", + # We only want to prefer_local here if rustc is performing the link + prefer_local = prefer_local and deferred_link_cmd == None, + category = category, identifier = identifier, no_outputs_cleanup = incremental_enabled, - allow_cache_upload = allow_cache_upload, + # We want to unconditionally cache object file compilations when rustc is not linking + allow_cache_upload = allow_cache_upload or deferred_link_cmd != None, ) - return ({diag + ".json": json_diag, diag + ".txt": txt_diag}, build_status) + if deferred_link_cmd: + ctx.actions.run( + deferred_link_cmd, + local_only = local_only, + prefer_local = prefer_local, + category = "deferred_link", + allow_cache_upload = allow_cache_upload, + ) + + return Invoke( + diag_txt = diag_txt, + diag_json = diag_json, + build_status = build_status, + identifier = identifier, + ) # Our rustc and rustdoc commands can have arbitrarily large number of `--extern` # flags, so write to file to avoid hitting the platform's limit on command line @@ -1314,11 +1451,22 @@ def _long_command( exe: RunInfo, args: cmd_args, argfile_name: str) -> cmd_args: - argfile, hidden = ctx.actions.write(argfile_name, args, allow_args = True) - return cmd_args(exe, cmd_args(argfile, format = "@{}")).hidden(args, hidden) + return cmd_args( + exe, + at_argfile( + actions = ctx.actions, + name = argfile_name, + args = args, + allow_args = True, + ), + ) _DOUBLE_ESCAPED_NEWLINE_RE = regex("\\\\n") _ESCAPED_NEWLINE_RE = regex("\\n") +_DIRECTORY_ENV = [ + "CARGO_MANIFEST_DIR", + "OUT_DIR", +] # Separate env settings into "plain" and "with path". Path env vars are often # used in Rust `include!()` and similar directives, which always interpret the @@ -1328,10 +1476,11 @@ _ESCAPED_NEWLINE_RE = regex("\\n") # paths to absolute paths so they'll work in any context. Hence the need to # distinguish path from non-path. (This will not work if the value contains both # path and non-path content, but we'll burn that bridge when we get to it.) -def _process_env( +def process_env( compile_ctx: CompileContext, env: dict[str, str | ResolvedStringWithMacros | Artifact], - exec_is_windows: bool) -> (dict[str, cmd_args], dict[str, cmd_args]): + exec_is_windows: bool, + escape_for_rustc_action: bool = True) -> (dict[str, cmd_args], dict[str, cmd_args]): # Values with inputs (ie artifact references). 
path_env = {} @@ -1342,12 +1491,20 @@ def _process_env( v = cmd_args(v) if len(v.inputs) > 0: path_env[k] = v - else: + elif escape_for_rustc_action: # Environment variables may have newlines, escape them for now. # Will be unescaped in rustc_action. # Variable may have "\\n" as well. # Example: \\n\n -> \\\n\n -> \\\\n\\n - plain_env[k] = v.replace_regex(_DOUBLE_ESCAPED_NEWLINE_RE, "\\\n").replace_regex(_ESCAPED_NEWLINE_RE, "\\n") + plain_env[k] = cmd_args( + v, + replace_regex = [ + (_DOUBLE_ESCAPED_NEWLINE_RE, "\\\n"), + (_ESCAPED_NEWLINE_RE, "\\n"), + ], + ) + else: + plain_env[k] = cmd_args(v) # If CARGO_MANIFEST_DIR is not already expressed in terms of $(location ...) # of some target, then interpret it as a relative path inside of the crate's @@ -1384,13 +1541,14 @@ def _process_env( # and proc macros using std::fs to read thing like .pest grammars, which # would need paths relative to the directory that rustc got invoked in # (which is the repo root in Buck builds). - cargo_manifest_dir = plain_env.pop("CARGO_MANIFEST_DIR", None) - if cargo_manifest_dir: - path_env["CARGO_MANIFEST_DIR"] = cmd_args( - compile_ctx.symlinked_srcs, - "\\" if exec_is_windows else "/", - cargo_manifest_dir, - delimiter = "", - ) + for key in _DIRECTORY_ENV: + value = plain_env.pop(key, None) + if value: + path_env[key] = cmd_args( + compile_ctx.symlinked_srcs, + "\\" if exec_is_windows else "/", + value, + delimiter = "", + ) return (plain_env, path_env) diff --git a/prelude/rust/build_params.bzl b/prelude/rust/build_params.bzl index d0cdc8f36f1..f21c280c65a 100644 --- a/prelude/rust/build_params.bzl +++ b/prelude/rust/build_params.bzl @@ -7,6 +7,7 @@ # Rules for mapping requirements to options +load("@prelude//cxx:cxx_toolchain_types.bzl", "LinkerType") load( "@prelude//linking:link_info.bzl", "LibOutputStyle", @@ -29,10 +30,6 @@ CrateType = enum( "staticlib", ) -# Crate type is intended for consumption by Rust code -def crate_type_rust_linkage(crate_type: CrateType) -> bool: - return crate_type.value in ("rlib", "dylib", "proc-macro") - # Crate type is intended for native linkage (eg C++) def crate_type_native_linkage(crate_type: CrateType) -> bool: return crate_type.value in ("cdylib", "staticlib") @@ -64,16 +61,44 @@ Emit = enum( "llvm-bc", "llvm-ir", "obj", - "metadata", "link", "dep-info", "mir", "expand", # pseudo emit alias for -Zunpretty=expanded + "clippy", + # Rustc actually has two different forms of metadata: + # - The full flavor, which is what's outputted when passing + # `--emit link,metadata` and can be used as a part of pipelined builds + # - The fast flavor, which is emitted from `--emit metadata`, is faster to + # build, but cannot be used in pipelined builds. + "metadata-full", + "metadata-fast", +) + +# The different quantities of Rust metadata that can be requested from +# dependencies. 
Each one corresponds to an `Emit` variant, but not all `Emit` +# variants output metadata +MetadataKind = enum( + "fast", + "full", + "link", ) # Emitting this artifact generates code -def emit_needs_codegen(emit: Emit) -> bool: - return emit.value in ("asm", "llvm-bc", "llvm-ir", "obj", "link", "mir") +def dep_metadata_of_emit(emit: Emit) -> MetadataKind: + return { + Emit("asm"): MetadataKind("link"), + Emit("llvm-bc"): MetadataKind("link"), + Emit("llvm-ir"): MetadataKind("link"), + Emit("obj"): MetadataKind("link"), + Emit("link"): MetadataKind("link"), + Emit("mir"): MetadataKind("link"), + Emit("metadata-fast"): MetadataKind("fast"), + Emit("clippy"): MetadataKind("fast"), + Emit("dep-info"): MetadataKind("full"), + Emit("expand"): MetadataKind("full"), + Emit("metadata-full"): MetadataKind("full"), + }[emit] # Represents a way of invoking rustc to produce an artifact. These values are computed from # information such as the rule type, linkstyle, crate type, etc. @@ -91,6 +116,7 @@ BuildParams = record( RustcFlags = record( crate_type = field(CrateType), platform_to_affix = field(typing.Callable), + link_strategy = field(LinkStrategy | None), ) # Filenames used for various emitted forms @@ -100,11 +126,13 @@ _EMIT_PREFIX_SUFFIX = { Emit("llvm-bc"): ("", ".bc"), Emit("llvm-ir"): ("", ".ll"), Emit("obj"): ("", ".o"), - Emit("metadata"): ("lib", ".rmeta"), # even binaries get called 'libfoo.rmeta' + Emit("metadata-fast"): ("lib", ".rmeta"), # even binaries get called 'libfoo.rmeta' + Emit("metadata-full"): (None, None), # Hollow rlibs, so they get the same name Emit("link"): (None, None), # crate type and reloc model dependent Emit("dep-info"): ("", ".d"), Emit("mir"): (None, ".mir"), Emit("expand"): (None, ".rs"), + Emit("clippy"): ("lib", ".rmeta"), # Treated like metadata-fast } # Return the filename for a particular emitted artifact type @@ -150,6 +178,7 @@ LinkageLang = enum( ) _BINARY = 0 +_RUST_PROC_MACRO_RUSTDOC_TEST = 1 _NATIVE_LINKABLE_SHARED_OBJECT = 3 _RUST_DYLIB_SHARED = 4 _RUST_PROC_MACRO = 5 @@ -158,62 +187,86 @@ _RUST_STATIC_NON_PIC_LIBRARY = 7 _NATIVE_LINKABLE_STATIC_PIC = 8 _NATIVE_LINKABLE_STATIC_NON_PIC = 9 -def _executable_prefix_suffix(linker_type: str, target_os_type: OsLookup) -> (str, str): +def _executable_prefix_suffix(linker_type: LinkerType, target_os_type: OsLookup) -> (str, str): return { - "darwin": ("", ""), - "gnu": ("", ".exe") if target_os_type.platform == "windows" else ("", ""), - "wasm": ("", ".wasm"), - "windows": ("", ".exe"), + LinkerType("darwin"): ("", ""), + LinkerType("gnu"): ("", ".exe") if target_os_type.platform == "windows" else ("", ""), + LinkerType("wasm"): ("", ".wasm"), + LinkerType("windows"): ("", ".exe"), }[linker_type] -def _library_prefix_suffix(linker_type: str, target_os_type: OsLookup) -> (str, str): +def _library_prefix_suffix(linker_type: LinkerType, target_os_type: OsLookup) -> (str, str): return { - "darwin": ("lib", ".dylib"), - "gnu": ("", ".dll") if target_os_type.platform == "windows" else ("lib", ".so"), - "wasm": ("", ".wasm"), - "windows": ("", ".dll"), + LinkerType("darwin"): ("lib", ".dylib"), + LinkerType("gnu"): ("", ".dll") if target_os_type.platform == "windows" else ("lib", ".so"), + LinkerType("wasm"): ("", ".wasm"), + LinkerType("windows"): ("", ".dll"), }[linker_type] _BUILD_PARAMS = { _BINARY: RustcFlags( crate_type = CrateType("bin"), platform_to_affix = _executable_prefix_suffix, + # link_strategy is provided by the rust_binary attribute + link_strategy = None, + ), + # It's complicated: this is a 
rustdoc test for a procedural macro crate. + # We need deps built as if this were a binary, while passing crate-type + # proc_macro to the rustdoc invocation. + _RUST_PROC_MACRO_RUSTDOC_TEST: RustcFlags( + crate_type = CrateType("proc-macro"), + platform_to_affix = _executable_prefix_suffix, + link_strategy = LinkStrategy("static_pic"), ), _NATIVE_LINKABLE_SHARED_OBJECT: RustcFlags( crate_type = CrateType("cdylib"), platform_to_affix = _library_prefix_suffix, + # cdylibs statically link all rust code and export a single C-style dylib + # for consumption by other languages + link_strategy = LinkStrategy("shared"), ), _RUST_DYLIB_SHARED: RustcFlags( crate_type = CrateType("dylib"), platform_to_affix = _library_prefix_suffix, + link_strategy = LinkStrategy("shared"), ), _RUST_PROC_MACRO: RustcFlags( crate_type = CrateType("proc-macro"), platform_to_affix = _library_prefix_suffix, + # FIXME(JakobDegen): It's not really clear what we should do about + # proc macros. The principled thing is probably to treat them sort + # of like a normal library, except that they always have preferred + # linkage shared? Preserve existing behavior for now + link_strategy = LinkStrategy("static_pic"), ), # FIXME(JakobDegen): Add a comment explaining why `.a`s need reloc-strategy # dependent names while `.rlib`s don't. _RUST_STATIC_PIC_LIBRARY: RustcFlags( crate_type = CrateType("rlib"), platform_to_affix = lambda _l, _t: ("lib", ".rlib"), + link_strategy = LinkStrategy("static_pic"), ), _RUST_STATIC_NON_PIC_LIBRARY: RustcFlags( crate_type = CrateType("rlib"), platform_to_affix = lambda _l, _t: ("lib", ".rlib"), + link_strategy = LinkStrategy("static"), ), _NATIVE_LINKABLE_STATIC_PIC: RustcFlags( crate_type = CrateType("staticlib"), platform_to_affix = lambda _l, _t: ("lib", "_pic.a"), + link_strategy = LinkStrategy("static_pic"), ), _NATIVE_LINKABLE_STATIC_NON_PIC: RustcFlags( crate_type = CrateType("staticlib"), platform_to_affix = lambda _l, _t: ("lib", ".a"), + link_strategy = LinkStrategy("static"), ), } _INPUTS = { # Binary ("binary", False, None, "rust"): _BINARY, + ("binary", True, None, "rust"): _RUST_PROC_MACRO_RUSTDOC_TEST, # Native linkable shared object ("library", False, "shared_lib", "native"): _NATIVE_LINKABLE_SHARED_OBJECT, # Native unbundled linkable shared object @@ -286,50 +339,14 @@ def build_params( link_strategy: LinkStrategy | None, lib_output_style: LibOutputStyle | None, lang: LinkageLang, - linker_type: str, + linker_type: LinkerType, target_os_type: OsLookup) -> BuildParams: if rule == RuleType("binary"): expect(link_strategy != None) expect(lib_output_style == None) else: - expect(link_strategy == None) expect(lib_output_style != None) - # FIXME(JakobDegen): We deal with Rust needing to know the link strategy - # even for building archives by using a default link strategy specifically - # for those cases. I've gone through the code and checked all the places - # where the link strategy is used to determine that this won't do anything - # too bad, but it would be nice to enforce that more strictly or not have - # this at all. - def default_link_strategy_for_output_style(output_style: LibOutputStyle) -> LinkStrategy: - if output_style == LibOutputStyle("archive"): - return LinkStrategy("static") - if output_style == LibOutputStyle("pic_archive"): - return LinkStrategy("static_pic") - - # Rust does not have the `link_style` attribute on libraries in the same - # way that C++ does - if it did, this is what it would affect. 
- return LinkStrategy("shared") - - if not link_strategy: - if proc_macro: - # FIXME(JakobDegen): It's not really clear what we should do about - # proc macros. The principled thing is probably to treat them sort - # of like a normal library, except that they always have preferred - # linkage shared? Preserve existing behavior for now - link_strategy = LinkStrategy("static_pic") - else: - link_strategy = default_link_strategy_for_output_style(lib_output_style) - - if rule == RuleType("binary") and proc_macro: - # It's complicated: this is a rustdoc test for a procedural macro crate. - # We need deps built as if this were a binary, while passing crate-type - # proc_macro to the rustdoc invocation. - crate_type = CrateType("proc-macro") - proc_macro = False - else: - crate_type = None - input = (rule.value, proc_macro, lib_output_style.value if lib_output_style else None, lang.value) expect( @@ -342,11 +359,19 @@ def build_params( ) flags = _BUILD_PARAMS[_INPUTS[input]] + + # FIXME(JakobDegen): We deal with Rust needing to know the link strategy + # even for building archives by using a default link strategy specifically + # for those cases. I've gone through the code and checked all the places + # where the link strategy is used to determine that this won't do anything + # too bad, but it would be nice to enforce that more strictly or not have + # this at all. + link_strategy = link_strategy or flags.link_strategy reloc_model = _get_reloc_model(link_strategy, target_os_type) prefix, suffix = flags.platform_to_affix(linker_type, target_os_type) return BuildParams( - crate_type = crate_type or flags.crate_type, + crate_type = flags.crate_type, reloc_model = reloc_model, dep_link_strategy = link_strategy, prefix = prefix, diff --git a/prelude/rust/cargo_buildscript.bzl b/prelude/rust/cargo_buildscript.bzl index 3602bab0ae7..52722021fe4 100644 --- a/prelude/rust/cargo_buildscript.bzl +++ b/prelude/rust/cargo_buildscript.bzl @@ -20,15 +20,20 @@ load("@prelude//:prelude.bzl", "native") load("@prelude//decls:common.bzl", "buck") -load("@prelude//linking:link_info.bzl", "LinkStrategy") load("@prelude//os_lookup:defs.bzl", "OsLookup") load("@prelude//rust:rust_toolchain.bzl", "RustToolchainInfo") load("@prelude//rust:targets.bzl", "targets") load("@prelude//decls/toolchains_common.bzl", "toolchains_common") load(":build.bzl", "dependency_args") -load(":build_params.bzl", "CrateType") +load(":build_params.bzl", "MetadataKind") load(":context.bzl", "DepCollectionContext") -load(":link_info.bzl", "RustProcMacroPlugin", "gather_explicit_sysroot_deps", "resolve_rust_deps_inner") +load( + ":link_info.bzl", + "DEFAULT_STATIC_LINK_STRATEGY", + "RustProcMacroPlugin", + "gather_explicit_sysroot_deps", + "resolve_rust_deps_inner", +) load(":rust_toolchain.bzl", "PanicRuntime") def _make_rustc_shim(ctx: AnalysisContext, cwd: Artifact) -> cmd_args: @@ -48,22 +53,20 @@ def _make_rustc_shim(ctx: AnalysisContext, cwd: Artifact) -> cmd_args: deps = gather_explicit_sysroot_deps(dep_ctx) deps = resolve_rust_deps_inner(ctx, deps) dep_args, _ = dependency_args( - ctx, - None, # compile_ctx - deps, - "any", # subdir - CrateType("rlib"), - LinkStrategy("static_pic"), - True, # is_check - False, # is_rustdoc_test + ctx = ctx, + compile_ctx = None, + toolchain_info = toolchain_info, + deps = deps, + subdir = "any", + dep_link_strategy = DEFAULT_STATIC_LINK_STRATEGY, + dep_metadata_kind = MetadataKind("full"), + is_rustdoc_test = False, ) null_path = "nul" if ctx.attrs._exec_os_type[OsLookup].platform == "windows" else 
"/dev/null" - dep_args = cmd_args("--sysroot=" + null_path, dep_args) - dep_args = cmd_args("-Zunstable-options", dep_args) - dep_args = dep_args.relative_to(cwd) + dep_args = cmd_args("--sysroot=" + null_path, dep_args, relative_to = cwd) dep_file, _ = ctx.actions.write("rustc_dep_file", dep_args, allow_args = True) - sysroot_args = cmd_args("@", dep_file, delimiter = "").hidden(dep_args) + sysroot_args = cmd_args("@", dep_file, delimiter = "", hidden = dep_args) else: sysroot_args = cmd_args() @@ -72,7 +75,7 @@ def _make_rustc_shim(ctx: AnalysisContext, cwd: Artifact) -> cmd_args: "__rustc_shim.bat", [ "@echo off", - cmd_args(toolchain_info.compiler, sysroot_args, "%*", delimiter = " ").relative_to(cwd), + cmd_args(toolchain_info.compiler, sysroot_args, "%*", delimiter = " ", relative_to = cwd), ], allow_args = True, ) @@ -81,12 +84,12 @@ def _make_rustc_shim(ctx: AnalysisContext, cwd: Artifact) -> cmd_args: "__rustc_shim.sh", [ "#!/usr/bin/env bash", - cmd_args(toolchain_info.compiler, sysroot_args, "\"$@\"\n", delimiter = " ").relative_to(cwd), + cmd_args(toolchain_info.compiler, sysroot_args, "\"$@\"\n", delimiter = " ", relative_to = cwd), ], is_executable = True, allow_args = True, ) - return cmd_args(shim).relative_to(cwd).hidden(toolchain_info.compiler).hidden(sysroot_args) + return cmd_args(shim, relative_to = cwd, hidden = [toolchain_info.compiler, sysroot_args]) def _cargo_buildscript_impl(ctx: AnalysisContext) -> list[Provider]: toolchain_info = ctx.attrs._rust_toolchain[RustToolchainInfo] @@ -131,7 +134,7 @@ def _cargo_buildscript_impl(ctx: AnalysisContext) -> list[Provider]: # Environment variables specified in the target's attributes get priority # over all the above. for k, v in ctx.attrs.env.items(): - env[k] = cmd_args(v).relative_to(cwd) + env[k] = cmd_args(v, relative_to = cwd) ctx.actions.run( cmd, diff --git a/prelude/rust/cargo_package.bzl b/prelude/rust/cargo_package.bzl index d9b50510712..b80b51f90cf 100644 --- a/prelude/rust/cargo_package.bzl +++ b/prelude/rust/cargo_package.bzl @@ -5,6 +5,9 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# This file exports utilities for use with with reindeer. +# These are not used anywhere else in prelude and are not exported as prelude globals. + load("@prelude//:prelude.bzl", "native") load("@prelude//utils:selects.bzl", "selects") diff --git a/prelude/rust/clippy_configuration.bzl b/prelude/rust/clippy_configuration.bzl new file mode 100644 index 00000000000..74f0c3f6d79 --- /dev/null +++ b/prelude/rust/clippy_configuration.bzl @@ -0,0 +1,58 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load("@prelude//rust:rust_toolchain.bzl", "RustToolchainInfo") +load("@prelude//decls/toolchains_common.bzl", "toolchains_common") + +# Configurations for Clippy runs. 
+ClippyConfiguration = provider(
+    fields = {
+        "clippy_toml": provider_field(Artifact),
+    },
+)
+
+def _clippy_configuration_impl(ctx: AnalysisContext) -> list[Provider]:
+    toolchain_ctx = ctx.attrs._rust_toolchain[RustToolchainInfo]
+    toolchain_clippy_toml = toolchain_ctx.clippy_toml
+
+    if not toolchain_clippy_toml:
+        clippy_toml = ctx.attrs.clippy_toml_src
+    else:
+        toml_merge_tool = ctx.attrs.toml_merge_tool
+
+        clippy_toml = ctx.actions.declare_output("clippy.toml")
+        ctx.actions.run([
+            toml_merge_tool[RunInfo],
+            cmd_args(clippy_toml.as_output(), format = "--output={}"),
+            cmd_args(toolchain_clippy_toml, format = "--file={}"),
+            cmd_args(ctx.attrs.clippy_toml_src, format = "--file={}"),
+        ], category = "clippy_toml_merge")
+
+    return [
+        DefaultInfo(
+            default_output = clippy_toml,
+        ),
+        ClippyConfiguration(
+            clippy_toml = clippy_toml,
+        ),
+    ]
+
+# Generate a Clippy configuration that is merged with the toolchain-specified
+# Clippy configuration (if defined).
+clippy_configuration = rule(impl = _clippy_configuration_impl, attrs = {
+    "clippy_toml_src": attrs.source(),
+    # TODO(emersonford): figure out how to store this in `_rust_toolchain`
+    # without causing a circular dependency on the toolchain target when
+    # `toml_merge_tool` is a `rust_binary`.
+    #
+    # Tool used to recursively merge multiple TOML files, e.g. for merging
+    # clippy.toml files. Must support taking multiple `--file <file>` flags
+    # as source files to merge and `--output <file>` flag to write the
+    # merged TOML table to.
+    "toml_merge_tool": attrs.exec_dep(providers = [RunInfo]),
+    "_rust_toolchain": toolchains_common.rust(),
+})
diff --git a/prelude/rust/context.bzl b/prelude/rust/context.bzl
index 74e76837ff8..8c098bb7df4 100644
--- a/prelude/rust/context.bzl
+++ b/prelude/rust/context.bzl
@@ -7,12 +7,12 @@

 load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo")
 load("@prelude//linking:link_info.bzl", "LinkStrategy")
-load(":build_params.bzl", "CrateType", "Emit")
+load(":build_params.bzl", "BuildParams", "CrateType", "Emit")
 load(":rust_toolchain.bzl", "PanicRuntime", "RustExplicitSysrootDeps", "RustToolchainInfo")

 CrateName = record(
-    simple = field(str),
-    dynamic = field([Artifact, None]),
+    simple = field(str | ResolvedStringWithMacros),
+    dynamic = field(Artifact | None),
 )

 # Struct for sharing common args between rustc and rustdoc
@@ -21,8 +21,10 @@ CommonArgsInfo = record(
     args = field(cmd_args),
     subdir = field(str),
     tempfile = field(str),
-    short_cmd = field(str),
-    is_check = field(bool),
+    crate_type = field(CrateType),
+    params = field(BuildParams),
+    emit = field(Emit),
+    emit_requires_linking = field(bool),
     crate_map = field(list[(CrateName, Label)]),
 )

@@ -53,7 +55,7 @@ CompileContext = record(
     # Clippy wrapper (wrapping clippy-driver so it has the same CLI as rustc).
     clippy_wrapper = field(cmd_args),
     # Memoized common args for reuse.
- common_args = field(dict[(CrateType, Emit, LinkStrategy, bool), CommonArgsInfo]), + common_args = field(dict[(CrateType, Emit, LinkStrategy, bool, bool, bool), CommonArgsInfo]), transitive_dependency_dirs = field(dict[Artifact, None]), sysroot_args = field(cmd_args), ) diff --git a/prelude/rust/extern.bzl b/prelude/rust/extern.bzl index d2702ded185..2c19ad4bb13 100644 --- a/prelude/rust/extern.bzl +++ b/prelude/rust/extern.bzl @@ -7,6 +7,14 @@ load(":context.bzl", "CrateName") +def crate_name_as_cmd_arg(crate: CrateName) -> cmd_args | str | ResolvedStringWithMacros: + if crate.dynamic: + # TODO: consider using `cmd_args(crate.dynamic, quote = "json")` so it + # doesn't fall apart on paths containing ')' + return cmd_args(crate.dynamic, format = "$(cat {})") + else: + return crate.simple + # Create `--extern` flag. For crates with a name computed during analysis: # # --extern=NAME=path/to/libNAME.rlib @@ -21,14 +29,14 @@ def extern_arg(flags: list[str], crate: CrateName, lib: Artifact) -> cmd_args: else: flags = ",".join(flags) + ":" - if crate.dynamic: - # TODO: consider using `cmd_args(crate.dynamic, quote = "json")` so it - # doesn't fall apart on paths containing ')' - crate_name = cmd_args(crate.dynamic, format = "$(cat {})") - else: - crate_name = crate.simple - - return cmd_args("--extern=", flags, crate_name, "=", lib, delimiter = "") + return cmd_args( + "--extern=", + flags, + crate_name_as_cmd_arg(crate), + "=", + lib, + delimiter = "", + ) # Create `--crate-map` flag. For crates with a name computed during analysis: # @@ -39,9 +47,10 @@ def extern_arg(flags: list[str], crate: CrateName, lib: Artifact) -> cmd_args: # --crate-map=$(cat path/to/REALNAME)=//path/to:target # def crate_map_arg(crate: CrateName, label: Label) -> cmd_args: - if crate.dynamic: - crate_name = cmd_args(crate.dynamic, format = "$(cat {})") - else: - crate_name = crate.simple - - return cmd_args("--crate-map=", crate_name, "=", str(label.raw_target()), delimiter = "") + return cmd_args( + "--crate-map=", + crate_name_as_cmd_arg(crate), + "=", + str(label.raw_target()), + delimiter = "", + ) diff --git a/prelude/rust/failure_filter.bzl b/prelude/rust/failure_filter.bzl index 7a8fa9ff313..67533c1da78 100644 --- a/prelude/rust/failure_filter.bzl +++ b/prelude/rust/failure_filter.bzl @@ -7,16 +7,6 @@ load(":context.bzl", "CompileContext") -# Inputs to the fail filter -RustFailureFilter = provider(fields = { - # Build status json - "buildstatus": typing.Any, - # Required files - "required": typing.Any, - # stderr - "stderr": typing.Any, -}) - # This creates an action which takes a buildstatus json artifact as an input, and a list of other # artifacts. If all those artifacts are present in the buildstatus as successfully generated, then # the action will succeed with those artifacts as outputs. Otherwise it fails. 
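The rewrite of `failure_filter` below replaces the `RustFailureFilter` provider with explicit artifact parameters. A minimal sketch of a call site under the new signature, assuming `invoke` is the `Invoke` record returned by `_rustc_invoke` with `infallible_diagnostics = True`, and `emit_output` is the artifact rustc was asked to emit:

    filtered_output = failure_filter(
        ctx = ctx,
        compile_ctx = compile_ctx,
        predeclared_output = None,
        build_status = invoke.build_status,
        required = emit_output,
        stderr = invoke.diag_txt,
        identifier = invoke.identifier,
    )

If the build-status JSON records `emit_output` as successfully generated, the action forwards it as `filtered_output`; otherwise the action fails, reintroducing the compile error for consumers that need the real artifact while leaving the diagnostics themselves buildable.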
@@ -24,19 +14,16 @@ RustFailureFilter = provider(fields = { def failure_filter( ctx: AnalysisContext, compile_ctx: CompileContext, - prefix: str, - predecl_out: [Artifact, None], - failprov: RustFailureFilter, - short_cmd: str) -> Artifact: + predeclared_output: Artifact | None, + build_status: Artifact, + required: Artifact, + stderr: Artifact, + identifier: str) -> Artifact: toolchain_info = compile_ctx.toolchain_info failure_filter_action = toolchain_info.failure_filter_action - buildstatus = failprov.buildstatus - required = failprov.required - stderr = failprov.stderr - - if predecl_out: - output = predecl_out + if predeclared_output: + output = predeclared_output else: output = ctx.actions.declare_output("out/" + required.short_path) @@ -49,9 +36,9 @@ def failure_filter( required, output.as_output(), "--build-status", - buildstatus, + build_status, ) - ctx.actions.run(cmd, category = "failure_filter", identifier = "{} {}".format(prefix, short_cmd)) + ctx.actions.run(cmd, category = "failure_filter", identifier = identifier) return output diff --git a/prelude/rust/link_info.bzl b/prelude/rust/link_info.bzl index e392a74050b..e955e2b3a16 100644 --- a/prelude/rust/link_info.bzl +++ b/prelude/rust/link_info.bzl @@ -16,6 +16,7 @@ load( "@prelude//cxx:cxx.bzl", "get_auto_link_group_specs", ) +load("@prelude//cxx:cxx_context.bzl", "get_cxx_toolchain_info") load( "@prelude//cxx:cxx_library_utility.bzl", "cxx_is_gnu", @@ -23,7 +24,6 @@ load( load("@prelude//cxx:cxx_toolchain_types.bzl", "PicBehavior") load( "@prelude//cxx:link_groups.bzl", - "LinkGroupInfo", # @unused Used as a type "LinkGroupLinkInfo", # @unused Used as a type "create_link_groups", "get_filtered_labels_to_links_map", @@ -32,6 +32,16 @@ load( "get_link_group", "get_link_group_info", "get_link_group_preferred_linkage", + "get_public_link_group_nodes", +) +load( + "@prelude//cxx:link_groups_types.bzl", + "LinkGroupInfo", # @unused Used as a type +) +load( + "@prelude//cxx:linker.bzl", + "get_default_shared_library_name", + "get_shared_library_name_for_param", ) load( "@prelude//linking:link_groups.bzl", @@ -40,9 +50,9 @@ load( ) load( "@prelude//linking:link_info.bzl", + "LibOutputStyle", "LinkInfo", "LinkStrategy", - "Linkage", # @unused Used as a type "MergedLinkInfo", "get_link_args_for_strategy", "unpack_external_debug_info", @@ -57,22 +67,36 @@ load( "@prelude//linking:shared_libraries.bzl", "SharedLibraryInfo", ) +load( + "@prelude//linking:types.bzl", + "Linkage", # @unused Used as a type +) +load( + "@prelude//utils:type_defs.bzl", + "is_dict", + "is_string", +) +load( + ":build_params.bzl", + "MetadataKind", # @unused Used as a type +) load( ":context.bzl", "CrateName", # @unused Used as a type "DepCollectionContext", # @unused Used as a type ) -load(":rust_toolchain.bzl", "PanicRuntime") +load(":rust_toolchain.bzl", "PanicRuntime", "RustToolchainInfo") # Link strategy for targets which do not set an explicit `link_style` attribute. +# +# These values are also used as the defaults for check/clippy subtargets on +# libraries, and are the only way in which metadata-fast output can be built. +# +# Internally at Meta, these are a good choice for a default because they allow +# sharing work between check builds and dev mode builds, which have shared link +# strategy, and so consume their dependencies as `static_pic`. DEFAULT_STATIC_LINK_STRATEGY = LinkStrategy("static_pic") - -# Override dylib crates to static_pic, so that Rust code is always -# statically linked. 
-# In v1 we always linked Rust deps statically, even for "shared" link style -# That shouldn't be necessary, but fully shared needs some more debugging, -# so default to v1 behaviour. (Should be controlled with the `rust.force_rlib` option) -FORCE_RLIB = True +DEFAULT_STATIC_LIB_OUTPUT_STYLE = LibOutputStyle("pic_archive") RustProcMacroPlugin = plugins.kind() @@ -86,22 +110,16 @@ RustProcMacroMarker = provider(fields = { # Information which is keyed on link_style RustLinkStrategyInfo = record( - # Path to library or binary - rlib = field(Artifact), + # Path to the rlib, rmeta, dylib, etc. + outputs = field(dict[MetadataKind, Artifact]), # Transitive dependencies which are relevant to the consumer. For crate types which do not # propagate their deps (specifically proc macros), this set is empty # This does not include the proc macros, which are passed separately in `RustLinkInfo` - transitive_deps = field(dict[Artifact, CrateName]), - - # Path for library metadata (used for check or pipelining) - rmeta = field(Artifact), - # Transitive rmeta deps. This is the same dict as `transitive_deps`, except that it has the - # rmeta and not the rlib artifact - transitive_rmeta_deps = field(dict[Artifact, CrateName]), + transitive_deps = field(dict[MetadataKind, dict[Artifact, CrateName]]), transitive_proc_macro_deps = field(dict[RustProcMacroMarker, ()]), # Path to PDB file with Windows debug data. - pdb = field([Artifact, None]), + pdb = field(Artifact | None), # Debug info which is referenced -- but not included -- by the linkable rlib. external_debug_info = field(ArtifactTSet), ) @@ -114,11 +132,28 @@ RustLinkInfo = provider( "crate": CrateName, # strategies - information about each LinkStrategy as RustLinkStrategyInfo "strategies": dict[LinkStrategy, RustLinkStrategyInfo], - # Rust interacts with the native link graph in a non-standard way. Specifically, imagine we - # have a Rust library `:B` with its only one dependency `:A`, another Rust library. The Rust - # rules give Rust -> Rust dependencies special treatment, and as a result, the - # `MergedLinkInfo` provided from `:B` is not a "superset" of the `MergedLinkInfo` provided - # from `:A` (concrete differences discussed below). + # Rust interacts with the native link graph in a non-standard way. + # + # The first difference is in the re-export behavior of Rust compared to C++. The native link + # providers make an assumption that if one node in the link graph references a symbol in + # another node in the link graph, there is also a corresponding edge in the link graph. + # Specifically, the first node must declare a direct dependency on the second, a transitive + # dependency is not enough. For C++, this just means that each library depends in the link + # graph on its direct deps and their exported deps. + # + # For Rust, the situation is different. Because of re-exports and generics causing delayed + # codegen, the generated object files for a Rust library can generate symbol references to + # any of the library's transitive Rust dependencies, as well as to the immediate C++ + # dependencies of those libraries. So to account for that, each Rust library reports direct + # dependencies on all of those libraries in the link graph. The `merged_link_infos` and + # `linkable_graphs` lists are the providers from all of those libraries. + # + # The second difference is unique to the case where `advanced_unstable_linking` is not set + # on the toolchain. 
Imagine we have a Rust library `:B` whose only dependency is `:A`,
+            # another Rust library. The Rust rules give Rust -> Rust dependencies special treatment in
+            # the non-`advanced_unstable_linking` case. As a result, the `MergedLinkInfo` provided from
+            # `:B` is not a "superset" of the `MergedLinkInfo` provided from `:A` (concrete differences
+            # discussed below).
             #
             # This distinction is implemented by effectively having each Rust library provide two sets
             # of link providers. The first is the link providers used across Rust -> Rust dependency
@@ -127,27 +162,18 @@ RustLinkInfo = provider(
             # is a superset of the first, that is it includes anything that the first link providers
             # added.
             #
-            # The way in which the native link providers and Rust link providers differ depends on
-            # whether `advanced_unstable_linking` is set on the toolchain.
+            # The concrete difference is that the Rust `MergedLinkInfo` provided by `:A` is only the
+            # result of merging the `MergedLinkInfo`s from `:A`'s deps, and does not contain anything
+            # about `:A`. Instead, when `:B` produces the native `MergedLinkInfo`, it will add a single
+            # static library that bundles all transitive Rust deps, including `:A` (and similarly for
+            # the DSO case).
             #
-            # * Without `advanced_unstable_linking`, the Rust `MergedLinkInfo` provided by `:A` is only
-            #   the result of merging the `MergedLinkInfo`s from `:A`'s deps, and does not contain
-            #   anything about `:A`. Instead, when `:B` produces the native `MergedLinkInfo`, it will
-            #   add a single static library that bundles all transitive Rust deps, including `:A` (and
-            #   similarly for the DSO case).
-            # * With `advanced_unstable_linking`, the Rust `MergedLinkInfo` provided by a `:A` does
-            #   include a linkable from `:A`, however that linkable is always the rlib (a static
-            #   library), regardless of `:A`'s preferred linkage or the link strategy. This matches the
-            #   `FORCE_RLIB` behavior, in which Rust -> Rust dependency edges are always statically
-            #   linked. The native link provider then depends on that, and only adds a linkable for the
-            #   `shared_lib` case.
-            "merged_link_info": MergedLinkInfo,
-            "shared_libs": SharedLibraryInfo,
-            # Because of the weird representation of `LinkableGraph`, there is no
-            # correct way to merge multiple linkable graphs without adding a new
-            # node at the same time. So we store a list to be able to depend on more
-            # than one
+            # With `advanced_unstable_linking`, Rust libraries essentially behave just like C++
+            # libraries in the link graph, with the handling of transitive dependencies being the only
+            # difference.
+ "merged_link_infos": dict[ConfiguredTargetLabel, MergedLinkInfo], "linkable_graphs": list[LinkableGraph], + "shared_libs": SharedLibraryInfo, # LinkGroupLibInfo intentionally omitted because the Rust -> Rust version # never needs to be different from the Rust -> native version # @@ -163,14 +189,14 @@ RustLinkInfo = provider( }, ) -def _adjust_link_strategy_for_rust_dependencies(dep_link_strategy: LinkStrategy) -> LinkStrategy: - if FORCE_RLIB and dep_link_strategy == LinkStrategy("shared"): +def _adjust_link_strategy_for_rust_dependencies(toolchain_info: RustToolchainInfo, dep_link_strategy: LinkStrategy) -> LinkStrategy: + if dep_link_strategy == LinkStrategy("shared") and not toolchain_info.advanced_unstable_linking: return DEFAULT_STATIC_LINK_STRATEGY else: return dep_link_strategy -def strategy_info(info: RustLinkInfo, dep_link_strategy: LinkStrategy) -> RustLinkStrategyInfo: - rust_dep_link_strategy = _adjust_link_strategy_for_rust_dependencies(dep_link_strategy) +def strategy_info(toolchain_info: RustToolchainInfo, info: RustLinkInfo, dep_link_strategy: LinkStrategy) -> RustLinkStrategyInfo: + rust_dep_link_strategy = _adjust_link_strategy_for_rust_dependencies(toolchain_info, dep_link_strategy) return info.strategies[rust_dep_link_strategy] @@ -179,7 +205,7 @@ RustOrNativeDependency = record( # The actual dependency dep = field(Dependency), # The local name, if any (for `named_deps`) - name = field([None, str]), + name = field(None | str | ResolvedStringWithMacros), # Any flags for the dependency (`flagged_deps`), which are passed on to rustc. flags = field(list[str]), ) @@ -187,7 +213,8 @@ RustOrNativeDependency = record( RustDependency = record( info = field(RustLinkInfo), label = field(ConfiguredProvidersLabel), - name = field([None, str]), + dep = field(Dependency), + name = field(None | str | ResolvedStringWithMacros), flags = field(list[str]), proc_macro_marker = field([None, RustProcMacroMarker]), ) @@ -206,34 +233,29 @@ RustCxxLinkGroupInfo = record( link_group_libs = field(dict[str, [LinkGroupLib, None]]), # mapping from target labels to the corresponding link group link_info labels_to_links_map = field(dict[Label, LinkGroupLinkInfo]), + # Target to link group name where it was actually linked into + targets_consumed_by_link_groups = field(dict[Label, str]), # preferred linkage mode for link group libraries link_group_preferred_linkage = field(dict[Label, Linkage]), ) -def enable_link_groups( - ctx: AnalysisContext, - link_strategy: [LinkStrategy, None], - specified_link_strategy: LinkStrategy, - is_binary: bool): - if not (cxx_is_gnu(ctx) and is_binary): - # check minium requirements +def enable_link_groups(ctx: AnalysisContext): + if not cxx_is_gnu(ctx): + # check minimum requirements return False - if link_strategy == LinkStrategy("shared") or link_strategy != specified_link_strategy: - # check whether we should run link groups analysis for the given link strategy - return False - - # check whether link groups is enabled return ctx.attrs.auto_link_groups and ctx.attrs.link_group_map # Returns all first-order dependencies. 
def _do_resolve_deps( deps: list[Dependency], - named_deps: dict[str, Dependency], + named_deps: dict[str, Dependency] | list[(ResolvedStringWithMacros, Dependency)], flagged_deps: list[(Dependency, list[str])] = []) -> list[RustOrNativeDependency]: + named_deps_items = named_deps.items() if is_dict(named_deps) else named_deps + return [ RustOrNativeDependency(name = name, dep = dep, flags = flags) for name, dep, flags in [(None, dep, []) for dep in deps] + - [(name, dep, []) for name, dep in named_deps.items()] + + [(name, dep, []) for name, dep in named_deps_items] + [(None, dep, flags) for dep, flags in flagged_deps] ] @@ -294,17 +316,15 @@ def gather_explicit_sysroot_deps(dep_ctx: DepCollectionContext) -> list[RustOrNa def resolve_deps( ctx: AnalysisContext, dep_ctx: DepCollectionContext) -> list[RustOrNativeDependency]: - # The `getattr`s are needed for when we're operating on - # `prebuilt_rust_library` rules, which don't have those attrs. dependencies = _do_resolve_deps( deps = ctx.attrs.deps, - named_deps = getattr(ctx.attrs, "named_deps", {}), - flagged_deps = getattr(ctx.attrs, "flagged_deps", []), + named_deps = ctx.attrs.named_deps, + flagged_deps = ctx.attrs.flagged_deps, ) if dep_ctx.include_doc_deps: dependencies.extend(_do_resolve_deps( - deps = ctx.attrs.doc_deps, + deps = getattr(ctx.attrs, "doc_deps", []), named_deps = getattr(ctx.attrs, "doc_named_deps", {}), )) @@ -331,6 +351,7 @@ def resolve_rust_deps_inner( rust_deps.append(RustDependency( info = info, label = label, + dep = dep.dep, name = dep.name, flags = dep.flags, proc_macro_marker = proc_macro_marker, @@ -377,9 +398,13 @@ def inherited_exported_link_deps(ctx: AnalysisContext, dep_ctx: DepCollectionCon deps = {} for dep in _native_link_dependencies(ctx, dep_ctx): deps[dep.label] = dep - for info in _rust_non_proc_macro_link_infos(ctx, dep_ctx): - for dep in info.exported_link_deps: + for dep in resolve_rust_deps(ctx, dep_ctx): + if dep.proc_macro_marker != None: + continue + + for dep in dep.info.exported_link_deps: deps[dep.label] = dep + return deps.values() def inherited_rust_cxx_link_group_info( @@ -411,19 +436,27 @@ def inherited_rust_cxx_link_group_info( # handle labels that are mutated by version alias executable_deps.append(g.nodes.value.label) + public_link_group_nodes = get_public_link_group_nodes( + linkable_graph_node_map, + link_group_mappings, + executable_deps, + link_group, + ) + linked_link_groups = create_link_groups( ctx = ctx, link_groups = link_groups, + link_strategy = link_strategy, link_group_mappings = link_group_mappings, link_group_preferred_linkage = link_group_preferred_linkage, executable_deps = executable_deps, linker_flags = [], link_group_specs = auto_link_group_specs, - root_link_group = link_group, linkable_graph_node_map = linkable_graph_node_map, other_roots = [], prefer_stripped_objects = False, # Does Rust ever use stripped objects? 
anonymous = ctx.attrs.anonymous_link_groups, + public_nodes = public_link_group_nodes, ) auto_link_groups = {} @@ -434,7 +467,8 @@ def inherited_rust_cxx_link_group_info( if linked_link_group.library != None: link_group_libs[name] = linked_link_group.library - labels_to_links_map = get_filtered_labels_to_links_map( + labels_to_links = get_filtered_labels_to_links_map( + public_link_group_nodes, linkable_graph_node_map, link_group, link_groups, @@ -453,23 +487,28 @@ def inherited_rust_cxx_link_group_info( ) return RustCxxLinkGroupInfo( - filtered_links = get_filtered_links(labels_to_links_map), + filtered_links = get_filtered_links(labels_to_links.map), symbol_files_info = LinkInfo( pre_flags = linked_link_groups.symbol_ldflags, ), - filtered_targets = get_filtered_targets(labels_to_links_map), + filtered_targets = get_filtered_targets(labels_to_links.map), link_group_info = link_group_info, link_group_libs = link_group_libs, - labels_to_links_map = labels_to_links_map, + labels_to_links_map = labels_to_links.map, + targets_consumed_by_link_groups = linked_link_groups.targets_consumed_by_link_groups, link_group_preferred_linkage = link_group_preferred_linkage, ) def inherited_merged_link_infos( ctx: AnalysisContext, - dep_ctx: DepCollectionContext) -> list[MergedLinkInfo]: - infos = [] - infos.extend([d[MergedLinkInfo] for d in _native_link_dependencies(ctx, dep_ctx)]) - infos.extend([d.merged_link_info for d in _rust_non_proc_macro_link_infos(ctx, dep_ctx) if d.merged_link_info]) + dep_ctx: DepCollectionContext) -> dict[ConfiguredTargetLabel, MergedLinkInfo]: + infos = {} + for d in _native_link_dependencies(ctx, dep_ctx): + g = d.get(MergedLinkInfo) + if g: + infos[d.label.configured_target()] = g + for info in _rust_non_proc_macro_link_infos(ctx, dep_ctx): + infos.update(info.merged_link_infos) return infos def inherited_shared_libs( @@ -504,20 +543,22 @@ def inherited_rust_external_debug_info( ctx: AnalysisContext, dep_ctx: DepCollectionContext, link_strategy: LinkStrategy) -> list[ArtifactTSet]: - return [strategy_info(d.info, link_strategy).external_debug_info for d in resolve_rust_deps(ctx, dep_ctx)] + toolchain_info = ctx.attrs._rust_toolchain[RustToolchainInfo] + return [strategy_info(toolchain_info, d.info, link_strategy).external_debug_info for d in resolve_rust_deps(ctx, dep_ctx)] def inherited_external_debug_info( ctx: AnalysisContext, dep_ctx: DepCollectionContext, - dwo_output_directory: [Artifact, None], + dwo_output_directory: Artifact | None, dep_link_strategy: LinkStrategy) -> ArtifactTSet: inherited_debug_infos = [] inherited_link_infos = [] + toolchain_info = ctx.attrs._rust_toolchain[RustToolchainInfo] for d in resolve_deps(ctx, dep_ctx): if RustLinkInfo in d.dep: - inherited_debug_infos.append(strategy_info(d.dep[RustLinkInfo], dep_link_strategy).external_debug_info) - inherited_link_infos.append(d.dep[RustLinkInfo].merged_link_info) + inherited_debug_infos.append(strategy_info(toolchain_info, d.dep[RustLinkInfo], dep_link_strategy).external_debug_info) + inherited_link_infos.extend(d.dep[RustLinkInfo].merged_link_infos.values()) elif MergedLinkInfo in d.dep: inherited_link_infos.append(d.dep[MergedLinkInfo]) @@ -531,8 +572,8 @@ def inherited_external_debug_info( children = inherited_debug_infos, ) -def normalize_crate(label: str) -> str: - return label.replace("-", "_") +def normalize_crate(label: str | ResolvedStringWithMacros) -> str | ResolvedStringWithMacros: + return label.replace("-", "_") if is_string(label) else label def attr_simple_crate_for_filenames(ctx: 
AnalysisContext) -> str:
     """
@@ -565,6 +606,15 @@ def attr_crate(ctx: AnalysisContext) -> CrateName:
     if dynamic:
         dynamic = dynamic.get(DefaultInfo).default_outputs[0]
     return CrateName(
-        simple = ctx.attrs.crate or normalize_crate(ctx.label.name),
+        simple = normalize_crate(ctx.attrs.crate or ctx.label.name),
         dynamic = dynamic,
     )
+
+def attr_soname(ctx: AnalysisContext) -> str:
+    """
+    Get the shared library name to set for the given rust library.
+    """
+    linker_info = get_cxx_toolchain_info(ctx).linker_info
+    if ctx.attrs.soname != None:
+        return get_shared_library_name_for_param(linker_info, ctx.attrs.soname)
+    return get_default_shared_library_name(linker_info, ctx.label)
diff --git a/prelude/rust/linkable_symbol.bzl b/prelude/rust/linkable_symbol.bzl
new file mode 100644
index 00000000000..d1e29554875
--- /dev/null
+++ b/prelude/rust/linkable_symbol.bzl
@@ -0,0 +1,152 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+#
+# This source code is licensed under both the MIT license found in the
+# LICENSE-MIT file in the root directory of this source tree and the Apache
+# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
+# of this source tree.
+
+"""
+Example:
+
+    buck_genrule(
+        name = "my-generated-data",
+        bash = "something slow",
+    )
+
+    rust_linkable_symbol(
+        name = "my-generated-data-symbol",
+        content_str = ":my-generated-data",  # or `content_bytes` for non-utf8
+    )
+
+    rust_binary(
+        name = "whoa",
+        srcs = ...,
+        deps = [
+            ...
+            ":my-generated-data-symbol",
+        ],
+    )
+
+The generated Rust library contains a get() function that returns your symbol's
+data as &'static str or &'static [u8], depending on whether you used str or
+bytes in the rust_linkable_symbol target.
+
+    fn main() {
+        let my_generated_data = my_generated_data_symbol::get();
+        println!("{:?}", my_generated_data);
+    }
+
+The major advantage of rust_linkable_symbol over directly using include_bytes
+with a mapped_srcs in your Rust target is that your slow genrule does not have
+to get built when you're doing typecheck-only builds of the Rust code. That
+applies to all of the following situations:
+
+  - `arc rust-check` a.k.a.
`buck2 build :whoa[check]` + + - documentation builds: `buck2 build :whoa[doc]` + + - all building performed by IDE +""" + +load("@prelude//rust:link_info.bzl", "RustLinkInfo") # @oss-enable +load("@prelude//prelude.bzl", prelude = "native") # @oss-enable +# @oss-disable: load("@fbcode//buck2/facebook:autodeps_hacks.bzl", "RustLinkInfo", "prelude") + +def _remove_rust_link_info_impl(ctx: AnalysisContext) -> list[Provider]: + out = [] + for p in ctx.attrs.base.providers: + if not isinstance(p, RustLinkInfo): + out.append(p) + return out + +_remove_rust_link_info = rule( + impl = _remove_rust_link_info_impl, + attrs = { + "base": attrs.dep(), + "labels": attrs.list(attrs.string()), + }, +) + +def rust_linkable_symbol( + name, + content_str = None, + content_bytes = None, + align_bytes = None, + visibility = None, + rust_library_macro = None): + if (content_str == None) == (content_bytes == None): + fail("rust_linkable_symbol requires exactly one of `content_str =` or `content_bytes =` to be passed") + + if align_bytes != None: + if content_bytes == None: + fail("rust_linkable_symbol's align_bytes is only supported when using content_bytes") + if align_bytes not in [2, 4, 8]: + fail("unsupported rust_linkable_symbol alignment") + + kind, content = ("str", content_str) if content_str else ("bytes", content_bytes) + + rust_library_macro = rust_library_macro or prelude.rust_library + + # Rustc shouldn't be the easiest way to accomplish this but here we are. + # + # Background reading: + # https://tratt.net/laurie/blog/2022/whats_the_most_portable_way_to_include_binary_blobs_in_an_executable.html + # + # Maybe use `#embed` eventually (several years from now?). + # https://www.open-std.org/jtc1/sc22/wg14/www/docs/n3017.htm + rust_library_macro( + name = "{}@symbol".format(name), + crate = name, + doctests = False, + env = { + "LINKABLE_SYMBOL": "{}:{}".format(package_name(), name), + }, + labels = [ + "generated", + "rustc_do_not_check", + ], + mapped_srcs = { + "prelude//rust/tools:linkable_symbol.rs": "lib.rs", + content: "content", + }, + rustc_flags = [ + "--cfg=rust_linkable_symbol_content_{}".format(kind), + "--cfg=rust_linkable_symbol_align_bytes=\"{}\"".format(align_bytes or 1), + "@$(location prelude//rust/tools:linkable_symbol_supports_no_std)", + ], + visibility = [], + ) + + # Alias the Rust library with a rule that just removes the `RustLinkInfo`. + # This causes the dependent library to be treated more like a C++ dep than a + # Rust dep, and thereby not be needed during type checking. + _remove_rust_link_info( + name = "{}@link".format(name), + base = ":{}@symbol".format(name), + labels = ["generated"], + ) + + rust_library_macro( + name = name, + deps = [ + ":{}@link".format(name), + ], + doctests = False, + env = { + "LINKABLE_SYMBOL": "{}:{}".format(package_name(), name), + }, + labels = [ + "generated", + ], + mapped_srcs = { + "prelude//rust/tools:linkable_symbol.rs": "lib.rs", + }, + rustc_flags = [ + "--cfg=rust_linkable_symbol_getter_{}".format(kind), + "--cfg=rust_linkable_symbol_align_bytes=\"{}\"".format(align_bytes or 1), + # Setting `no_std` here is unconditionally fine - a panic handler will + # be provided by whatever uses this library. + "--cfg=set_nostd", + ], + visibility = visibility, + ) diff --git a/prelude/rust/named_deps.bzl b/prelude/rust/named_deps.bzl new file mode 100644 index 00000000000..99ccd742656 --- /dev/null +++ b/prelude/rust/named_deps.bzl @@ -0,0 +1,35 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. 
+#
+# This source code is licensed under both the MIT license found in the
+# LICENSE-MIT file in the root directory of this source tree and the Apache
+# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
+# of this source tree.
+
+load("@prelude//utils:argfile.bzl", "at_argfile")
+load("@prelude//utils:type_defs.bzl", "is_list")
+load(":context.bzl", "CompileContext")
+
+# Write a file containing all the dynamically-generated dependency names. This
+# isn't used in the course of any Buck builds, but is needed by rust-project to
+# supply an accurate dependency graph to rust-analyzer.
+def write_named_deps_names(
+        ctx: AnalysisContext,
+        compile_ctx: CompileContext) -> Artifact | None:
+    if not is_list(ctx.attrs.named_deps):
+        return None
+
+    named_deps_names = ctx.actions.declare_output("named_deps")
+    ctx.actions.run(
+        cmd_args(
+            compile_ctx.toolchain_info.rustc_action,
+            cmd_args(named_deps_names.as_output(), format = "--echo={}"),
+            at_argfile(
+                actions = ctx.actions,
+                name = "named_deps.args",
+                args = [name for name, _dep in ctx.attrs.named_deps],
+                allow_args = True,
+            ),
+        ),
+        category = "named_deps",
+    )
+    return named_deps_names
diff --git a/prelude/rust/outputs.bzl b/prelude/rust/outputs.bzl
new file mode 100644
index 00000000000..8b04722e325
--- /dev/null
+++ b/prelude/rust/outputs.bzl
@@ -0,0 +1,47 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+#
+# This source code is licensed under both the MIT license found in the
+# LICENSE-MIT file in the root directory of this source tree and the Apache
+# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
+# of this source tree.
+
+load(
+    "@prelude//:artifact_tset.bzl",
+    "ArtifactTSet",  # @unused Used as a type
+)
+
+RustcOutput = record(
+    output = field(Artifact),
+    stripped_output = field(Artifact),
+    diag_txt = field(Artifact),
+    diag_json = field(Artifact),
+    pdb = field(Artifact | None),
+    dwp_output = field(Artifact | None),
+    # Zero or more Split DWARF debug info files are emitted into this directory
+    # with unpredictable filenames.
+    dwo_output_directory = field(Artifact | None),
+    extra_external_debug_info = field(list[ArtifactTSet]),
+)
+
+def output_as_diag_subtargets(o: RustcOutput, clippy: RustcOutput) -> dict[str, Artifact]:
+    return {
+        "check": o.output,
+        "clippy.json": clippy.diag_json,
+        "clippy.txt": clippy.diag_txt,
+        "diag.json": o.diag_json,
+        "diag.txt": o.diag_txt,
+    }
+
+# Access to additional outputs from Rust compilation.
+#
+# This provider is intended to be available from all rules that compile Rust
+# code. As a result, it must be different from `RustLinkInfo`, since it should
+# not exist on a prebuilt Rust library, but should exist on a binary.
+RustcExtraOutputsInfo = provider(
+    fields = {
+        "clippy": RustcOutput,
+        "clippy_incr": RustcOutput,
+        "metadata": RustcOutput,
+        "metadata_incr": RustcOutput,
+    },
+)
diff --git a/prelude/rust/rust-analyzer/check.bxl b/prelude/rust/rust-analyzer/check.bxl
index 252d6a5fb3d..191d74b0699 100644
--- a/prelude/rust/rust-analyzer/check.bxl
+++ b/prelude/rust/rust-analyzer/check.bxl
@@ -5,26 +5,32 @@
 # License, Version 2.0 found in the LICENSE-APACHE file in the root directory
 # of this source tree.
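`output_as_diag_subtargets` above is the glue that keeps subtargets like `[check]`, `[diag.json]`, and `[clippy.txt]` addressable on each rule. A minimal sketch of how a rule implementation might expose that map, assuming `rustc_out` and `clippy_out` are `RustcOutput` records for the same crate and `providers` is the rule's accumulated provider list:

    # Map each diagnostic name to a single-output DefaultInfo subtarget.
    diag = output_as_diag_subtargets(rustc_out, clippy_out)
    sub_targets = {
        name: [DefaultInfo(default_output = artifact)]
        for name, artifact in diag.items()
    }
    providers.append(DefaultInfo(default_output = rustc_out.output, sub_targets = sub_targets))

The check.bxl rewrite that follows takes the other route: rather than building named subtargets, it reads `diag_json` directly off `RustcExtraOutputsInfo` in the analysis results.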
-def check_targets_impl(ctx): - target_universe = ctx.uquery().owner(ctx.cli_args.file) - owners = ctx.cquery().owner(ctx.cli_args.file, target_universe) +load("@prelude//rust:outputs.bzl", "RustcExtraOutputsInfo") + +def check_targets_impl(ctx: bxl.Context) -> None: + uquery_owners = ctx.uquery().owner(ctx.cli_args.file) + target_universe = ctx.target_universe(uquery_owners) + owners = ctx.cquery().owner(ctx.cli_args.file, target_universe.target_set()) nodes = ctx.cquery().kind("^(rust_binary|rust_library|rust_test)$", owners) if len(nodes) == 0: return - diag_kind = "clippy.json" if ctx.cli_args.use_clippy else "diag.json" - build_result = ctx.build([ - node.label.with_sub_target(diag_kind) - for node in nodes - ]) + analysis = ctx.analysis(nodes).values() + + artifacts = [] + for a in analysis: + o = a.providers()[RustcExtraOutputsInfo] + if ctx.cli_args.use_clippy: + artifacts.append(o.clippy_incr.diag_json) + else: + artifacts.append(o.metadata_incr.diag_json) - dict_output = ctx.output.ensure_multiple(build_result) + art_output = ctx.output.ensure_multiple(artifacts) out = [ - artifacts[0].abs_path() - for artifacts in dict_output.values() - if len(artifacts) == 1 + artifact.abs_path() + for artifact in art_output ] ctx.output.print_json(out) diff --git a/prelude/rust/rust-analyzer/provider.bzl b/prelude/rust/rust-analyzer/provider.bzl new file mode 100644 index 00000000000..b764c5829ee --- /dev/null +++ b/prelude/rust/rust-analyzer/provider.bzl @@ -0,0 +1,79 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load("@prelude//rust:build.bzl", "crate_root", "process_env") +load( + "@prelude//rust:context.bzl", + "CompileContext", # @unused Used as a type + "DepCollectionContext", # @unused Used as a type +) +load("@prelude//rust:link_info.bzl", "get_available_proc_macros", "resolve_rust_deps") + +RustAnalyzerInfo = provider( + fields = { + # The root source for the rust target (typically lib.rs, main.rs), relative to the buck target file. + "crate_root": str, + # The processed env as produced by the buck build prelude. Some env vars like `OUT_DIR` and `CARGO_MANIFEST_DIR` + # will be made into absolute paths. + "env": dict[str, cmd_args], + # The list of rust deps needed for RustAnalyzer to function. Namely, this excludes things like + # exec deps used as inputs to genrules and other non-rust dependencies. + "rust_deps": list[Dependency], + # The list of recursive rust dependencies for this target, including proc macros. Useful for + # identifying the targets needing to be collected into Rust Analyzer's crate graph. Notably, + # excludes rust dependencies that are used in build tools (e.g. build scripts). + "transitive_target_set": set[TargetLabel], + }, +) + +def _compute_rust_deps( + ctx: AnalysisContext, + dep_ctx: DepCollectionContext) -> list[Dependency]: + dep_ctx = DepCollectionContext( + advanced_unstable_linking = dep_ctx.advanced_unstable_linking, + # Include doc deps here for any doctests that may be present in the target. + include_doc_deps = True, + is_proc_macro = dep_ctx.is_proc_macro, + # Rust Analyzer handles the sysroot separately. We omit the sysroot deps here and will + # instead pass a path to the sysroot as a separate config. 
+ explicit_sysroot_deps = None, + panic_runtime = dep_ctx.panic_runtime, + ) + + first_order_deps = resolve_rust_deps(ctx, dep_ctx) + available_proc_macros = get_available_proc_macros(ctx) + + return [dep.dep for dep in first_order_deps] + available_proc_macros.values() + +def _compute_transitive_target_set( + ctx: AnalysisContext, + first_order_deps: list[Dependency]) -> set[TargetLabel]: + transitive_targets = set([ctx.label.raw_target()]) + for dep in first_order_deps: + target_sets = dep.get(RustAnalyzerInfo).transitive_target_set + for target_set in target_sets: + transitive_targets.add(target_set) + return transitive_targets + +def _compute_env( + ctx: AnalysisContext, + compile_ctx: CompileContext) -> dict[str, cmd_args]: + # Disable rustc_action processing, as rust-project will handle windows + any escaping necessary. + plain_env, path_env = process_env(compile_ctx, ctx.attrs.env, False, False) + return plain_env | path_env + +def rust_analyzer_provider( + ctx: AnalysisContext, + compile_ctx: CompileContext, + default_roots: list[str]) -> RustAnalyzerInfo: + rust_deps = _compute_rust_deps(ctx, compile_ctx.dep_ctx) + return RustAnalyzerInfo( + crate_root = crate_root(ctx, default_roots), + env = _compute_env(ctx, compile_ctx), + rust_deps = rust_deps, + transitive_target_set = _compute_transitive_target_set(ctx, rust_deps), + ) diff --git a/prelude/rust/rust-analyzer/resolve_deps.bxl b/prelude/rust/rust-analyzer/resolve_deps.bxl index 61dd48b896b..ac94d320146 100644 --- a/prelude/rust/rust-analyzer/resolve_deps.bxl +++ b/prelude/rust/rust-analyzer/resolve_deps.bxl @@ -6,16 +6,43 @@ # of this source tree. load("@prelude//linking:link_info.bzl", "LinkStrategy") +load("@prelude//rust:build_params.bzl", "MetadataKind") load("@prelude//rust:link_info.bzl", "RustLinkInfo") +load("@prelude//rust/rust-analyzer:provider.bzl", "RustAnalyzerInfo") +load("@prelude//utils:type_defs.bzl", "is_list") -def materialize(ctx, target): +TargetInfo = dict[str, typing.Any] + +MacroOutput = record( + actual = TargetLabel, + dylib = Artifact, +) + +ExpandedAndResolved = record( + expanded_targets = list[TargetLabel], + queried_proc_macros = dict[TargetLabel, MacroOutput], + resolved_deps = dict[TargetLabel, TargetInfo], +) + +def materialize( + ctx: bxl.Context, + target: bxl.ConfiguredTargetNode) -> Artifact: analysis = ctx.analysis(target) sources = analysis.providers()[DefaultInfo].sub_targets["sources"][DefaultInfo].default_outputs[0] + return sources + +def _get_nullable_attr(attrs, key: str) -> typing.Any: + nullable = getattr(attrs, key, None) + return nullable.value() if nullable != None else None - # Ensures the srcs folder will be present - return ctx.output.ensure(sources).abs_path() +def _process_target_config( + ctx: bxl.Context, + target: bxl.ConfiguredTargetNode, + analysis: bxl.AnalysisResult, + in_workspace: bool) -> TargetInfo: + providers = analysis.providers() + ra_info = providers[RustAnalyzerInfo] -def _process_target_config(ctx, target, in_workspace, out_dir = None): # convert all source paths to absolute paths resolved_attrs = target.resolved_attrs_eager(ctx) @@ -23,24 +50,28 @@ def _process_target_config(ctx, target, in_workspace, out_dir = None): # For example, this is used in cxx powered crates internally srcs = [] for src in resolved_attrs.srcs: - srcs.append(ctx.output.ensure(src).abs_path()) + srcs.append(src) # remove the configured platform from the deps. 
for example,
    # `fbsource//third-party/rust:tracing (ovr_config//platform/linux:x86_64-fbcode-platform010-clang-9f23200ddcddc3cb)`
    # becomes `fbsource//third-party/rust:tracing`.
-    deps = []
-    for dep in resolved_attrs.deps:
-        deps.append(dep.label.raw_target())
+    deps = [dep.label.raw_target() for dep in ra_info.rust_deps]

     # Grab only the values that the gen-rules are being mapped to.
     mapped_srcs = {}
     for key, v in resolved_attrs.mapped_srcs.items():
-        mapped_srcs[v] = ctx.output.ensure(key).abs_path()
+        mapped_srcs[v] = key

     # remove the configured platform from named deps.
-    named_deps = {}
-    for dep, alias in resolved_attrs.named_deps.items():
-        named_deps[dep] = alias.label.raw_target()
+    if is_list(resolved_attrs.named_deps):
+        named_deps_names = providers[DefaultInfo].sub_targets["named_deps"][DefaultInfo].default_outputs[0]
+        named_deps = [named_deps_names]
+        for _alias, dep in resolved_attrs.named_deps:
+            named_deps.append(dep.label.raw_target())
+    else:
+        named_deps = {}
+        for dep, alias in resolved_attrs.named_deps.items():
+            named_deps[dep] = alias.label.raw_target()

     # remove the configured platform for tests
     tests = []
@@ -50,117 +81,79 @@ def _process_target_config(ctx, target, in_workspace, out_dir = None):
     # materialize a file containing the dynamic crate name
     crate_dynamic = getattr(resolved_attrs, "crate_dynamic", None)
     if crate_dynamic:
-        cratename_artifact = crate_dynamic.get(DefaultInfo).default_outputs[0]
-        crate_dynamic = ctx.output.ensure(cratename_artifact).abs_path()
+        crate_dynamic = crate_dynamic.get(DefaultInfo).default_outputs[0]
+
+    env = {k: cmd_args(v, delimiter = "") for k, v in ra_info.env.items()}

     # copy over the absolute paths and raw targets into the output
-    copy = {}
     attrs = target.attrs_eager()
-    for k in dir(attrs):
-        if k == "srcs":
-            copy["srcs"] = srcs
-        elif k == "deps":
-            copy["deps"] = deps
-        elif k == "mapped_srcs":
-            copy["mapped_srcs"] = mapped_srcs
-        elif k == "named_deps":
-            copy["named_deps"] = named_deps
-        elif k == "tests":
-            copy["tests"] = tests
-        elif k == "crate_dynamic":
-            copy["crate_dynamic"] = crate_dynamic
-        else:
-            copy[k] = getattr(attrs, k)
-
-    # Always generate the source folder. Let rust-project resolve whether or not to use it
-    copy["source_folder"] = materialize(ctx, target)
-    copy["label"] = target.label.raw_target()
-    copy["project_relative_buildfile"] = ctx.fs.project_rel_path(target.buildfile_path)
-    copy["kind"] = target.rule_type
-    copy["in_workspace"] = in_workspace
-    if out_dir:
-        copy["out_dir"] = out_dir
-
-    return copy
-
-def cquery_deps(ctx, top_targets, workspaces, actions):
+    return {
+        "crate": _get_nullable_attr(attrs, "crate"),
+        "crate_dynamic": crate_dynamic,
+        "crate_root": ra_info.crate_root,
+        "deps": deps,
+        "edition": _get_nullable_attr(attrs, "edition"),
+        "env": env,
+        "features": resolved_attrs.features,
+        "in_workspace": in_workspace,
+        "kind": target.rule_type,
+        "label": target.label.raw_target(),
+        "mapped_srcs": mapped_srcs,
+        "name": resolved_attrs.name,
+        "named_deps": named_deps,
+        "proc_macro": _get_nullable_attr(attrs, "proc_macro"),
+        "project_relative_buildfile": ctx.fs.project_rel_path(target.buildfile_path),
+        "rustc_flags": _get_nullable_attr(attrs, "rustc_flags"),
+        "source_folder": materialize(ctx, target), # Always generate the source folder.
Let rust-project resolve whether or not to use it + "srcs": srcs, + "tests": tests, + } + +def cquery_deps( + ctx: bxl.Context, + top_targets: list[TargetLabel], + workspaces: list[TargetLabel]) -> dict[TargetLabel, TargetInfo]: + targets = set() target_universe = ctx.target_universe(top_targets).target_set() - targets = ctx.cquery().deps(target_universe) - outputs = ctx.cquery().kind("^(rust_binary|rust_library|rust_test)$", targets) + analysis_set = ctx.analysis(target_universe) + for _target, analysis in analysis_set.items(): + info = analysis.providers().get(RustAnalyzerInfo) + if info: + for target_set in info.transitive_target_set: + targets.add(target_set) + + #TODO(romanp) support set as target_universe arg + outputs = ctx.target_universe(list(targets)).target_set() out = {} # Eagerly analyze targets - ctx.analysis(outputs) + analysis = ctx.analysis(outputs) - seen = {} for target in outputs: + attrs = target.attrs_lazy() + in_workspace = target.label.raw_target() in top_targets - for candidate_workspace in target.attrs_lazy().get("_workspaces").value(): - if candidate_workspace.raw_target() in workspaces: - in_workspace = True - - labels = target.attrs_lazy().get("labels") - if "thrift_library-rust" in labels.value(): - thrift_files = materialize_generated_thrift(ctx, target, actions, seen) - cfg = _process_target_config(ctx, target, in_workspace) - for thrift in thrift_files: - if thrift["mapped_src"] == "lib.rs": - cfg["crate_root"] = thrift["artifact"] - out[target.label.raw_target()] = cfg - elif "generated_protobuf_library_rust" in labels.value(): - protobuf_out_dir = materialize_generated_protobufs(ctx, target, actions, seen) - out[target.label.raw_target()] = _process_target_config(ctx, target, in_workspace, protobuf_out_dir) - else: - out[target.label.raw_target()] = _process_target_config(ctx, target, in_workspace) - return out + candidate_workspaces = attrs.get("_workspaces") + if candidate_workspaces: + for candidate_workspace in candidate_workspaces.value(): + if candidate_workspace.raw_target() in workspaces: + in_workspace = True + break + + target_info = _process_target_config( + ctx = ctx, + target = target, + analysis = analysis[target.label.with_sub_target()], + in_workspace = in_workspace, + ) + + out[target.label.raw_target()] = target_info -def materialize_generated_protobufs(ctx, target, actions, seen): - """If `target` has a dependency that generates code from protobufs, - materialize the generated code and return the path to the output directory. 
- """ - prost_target = target.attrs_lazy().get("named_deps").value().get("generated_prost_target") - t = prost_target.raw_target() - analysis = ctx.analysis(t) - output = analysis.providers()[DefaultInfo].default_outputs[0] - outfile = "{}/{}/{}".format(t.cell, t.package, t.name) - - if outfile in seen: - return None - seen[outfile] = () - - copied = ctx.output.ensure(actions.copy_file(outfile, output)) - return copied.abs_path() - -def materialize_generated_thrift(ctx, target, actions, seen): - mapped_srcs = target.attrs_lazy().get("mapped_srcs").value() - built = ctx.build(mapped_srcs.keys()) - out = [] - for label, artifacts in built.items(): - mapped_src = mapped_srcs.get(label) - - outfile = "{}/{}/{}/{}".format(target.label.cell, target.label.package, target.label.name, mapped_src) - if outfile in seen: - continue - - if label.sub_target: - label = "{}[{}]".format(label.raw_target(), label.sub_target[0]) - else: - label = label.raw_target() - - if len(artifacts.artifacts()) > 0: - copied = actions.copy_file(outfile, artifacts.artifacts()[0]) - copied = ctx.output.ensure(copied) - artifact = { - "artifact": copied.abs_path(), - "label": label, - "mapped_src": mapped_src, - } - out.append(artifact) - - seen[outfile] = () return out -def expand_proc_macros(ctx, targets): +def expand_proc_macros( + ctx: bxl.Context, + targets: list[TargetLabel]) -> dict[TargetLabel, MacroOutput]: target_universe = ctx.target_universe(targets).target_set() targets = ctx.cquery().deps(target_universe) targets = ctx.cquery().kind("^(rust_binary|rust_library)$", targets) @@ -171,13 +164,19 @@ def expand_proc_macros(ctx, targets): proc_macro = getattr(attrs, "proc_macro", False) if proc_macro: analysis = ctx.analysis(target) - rlib = analysis.providers()[RustLinkInfo].strategies[LinkStrategy("shared")].rlib + rlib = analysis.providers()[RustLinkInfo].strategies[LinkStrategy("shared")].outputs[MetadataKind("link")] label = target.label.raw_target() - out[label] = {"actual": label, "dylib": ctx.output.ensure(rlib).abs_path()} + out[label] = MacroOutput( + actual = label, + dylib = rlib, + ) return out # Returns a list of all the expanded targets including any workspaces, followed by just the workspaces -def expand_targets(ctx, targets): +def expand_targets( + ctx: bxl.Context, + targets: list[TargetLabel], + exclude_workspaces: bool) -> (list[TargetLabel], list[TargetLabel]): target_universe = ctx.target_universe(targets).target_set() kind_target_list = ctx.cquery().kind("^(rust_binary|rust_library|rust_test|alias)$", target_universe) @@ -188,21 +187,28 @@ def expand_targets(ctx, targets): # Map of potential workspaces to a list of the targets that name these as potential workspaces possible_workspaces = {} - for label, t in expanded_targets.items(): - workspaces = t.attrs_lazy().get("_workspaces") - if workspaces: - for workspace in workspaces.value(): - if not ctx.target_exists(str(workspace.raw_target())): - continue + if not exclude_workspaces: + for label, t in expanded_targets.items(): + workspaces = t.attrs_lazy().get("_workspaces") + if workspaces: + for workspace in workspaces.value(): + if not ctx.target_exists(str(workspace.raw_target())): + continue + + possible_workspaces.setdefault(workspace.raw_target(), []).append(label) - possible_workspaces.setdefault(workspace.raw_target(), []).append(label) + workspace_analysis = ctx.analysis(ctx.target_universe(possible_workspaces.keys()).target_set()) active_workspaces = {} - for workspace, candidate_deps in possible_workspaces.items(): - # FIXME: Using 
`cquery deps` here is not right. It will transparently look through - # dependency edges of all types, meaning that eg build tools written in Rust and built - # from source will show up too - workspace_deps = {d.label.raw_target(): () for d in ctx.cquery().deps(workspace)} + for workspace_label, analysis in workspace_analysis.items(): + workspace = workspace_label.raw_target() + candidate_deps = possible_workspaces[workspace] + workspace_info = analysis.providers().get(RustAnalyzerInfo) + if workspace_info: + workspace_deps = {t: () for t in workspace_info.transitive_target_set} + else: + workspace_deps = {} + for d in candidate_deps: if d in workspace_deps: active_workspaces[workspace] = () @@ -216,26 +222,115 @@ def expand_targets(ctx, targets): # in the prelude are a bit hard in general expanded_targets.pop(d, None) - return dedupe(sorted(expanded_targets.keys() + active_workspaces.keys())), sorted(active_workspaces.keys()) + return dedupe(sorted(expanded_targets.keys() + active_workspaces.keys())), sorted(possible_workspaces.keys()) -def expand_and_resolve_impl(ctx): +def resolve_targets_impl(ctx: bxl.Context) -> None: # equivalent of `flat_map`ing targets = [target for sublist in ctx.cli_args.targets for target in sublist] actions = ctx.bxl_actions().actions - expanded_targets, workspaces = expand_targets(ctx, targets) + expanded_targets, workspaces = expand_targets(ctx, targets, ctx.cli_args.exclude_workspaces) queried_proc_macros = expand_proc_macros(ctx, expanded_targets) - resolved_deps = cquery_deps(ctx, expanded_targets, workspaces, actions) - - ctx.output.print_json({ - "expanded_targets": expanded_targets, - "queried_proc_macros": queried_proc_macros, - "resolved_deps": resolved_deps, - }) + resolved_deps = cquery_deps(ctx, expanded_targets, workspaces) + + artifact = actions.declare_output("resolve_targets.json") + artifacts = actions.write_json( + artifact, + ExpandedAndResolved( + expanded_targets = expanded_targets, + queried_proc_macros = queried_proc_macros, + resolved_deps = resolved_deps, + ), + with_inputs = True, + absolute = True, + pretty = ctx.cli_args.pretty, + ) + ctx.output.ensure_multiple(artifacts) + ctx.output.print(ctx.output.ensure(artifact).abs_path()) + +def resolve_owning_buildfile_impl(ctx: bxl.Context) -> None: + # depending on the input, determine the initial set of targets + if ctx.cli_args.files: + targets = ctx.uquery().owner(ctx.cli_args.files) + elif ctx.cli_args.buildfiles: + targets = [ctx.uquery().targets_in_buildfile(buildfile) for buildfile in ctx.cli_args.buildfiles] + + # equivalent of `flat_map`ing + targets = [target for sublist in targets for target in sublist] + targets = ctx.uquery().kind("^(rust_binary|rust_library|rust_test)$", targets) + elif ctx.cli_args.targets: + # equivalent of `flat_map`ing + targets = [target for sublist in ctx.cli_args.targets for target in sublist] + targets = ctx.unconfigured_targets(targets) + else: + fail("Neither `--files`, `--targets`, nor `--buildfiles` were specified; this is a bug") + + # group targets by their buildfile + targets_by_buildfile = {} + for target in targets: + buildfile_path = ctx.fs.abs_path_unsafe(target.buildfile_path) + + if buildfile_path not in targets_by_buildfile: + targets_by_buildfile[buildfile_path] = utarget_set() + targets_by_buildfile[buildfile_path] += utarget_set([target]) + + # collect extra targets from each buildfile + extra_targets_by_buildfile = {} + for buildfile_path in targets_by_buildfile: + extra_targets = 
ctx.uquery().targets_in_buildfile("{}".format(buildfile_path))
+        extra_targets = ctx.uquery().kind("^(rust_binary|rust_library|rust_test)$", extra_targets)
+
+        # Exclude targets with the rustc_do_not_check label from the extra targets. This
+        # label is used for foo@symbol targets (generated by rust_linkable_symbols), which
+        # are slow to build and never direct dependencies of rust targets.
+        extra_targets -= ctx.uquery().attrfilter(
+            "labels",
+            "rustc_do_not_check",
+            extra_targets,
+        )
+
+        # explicitly included targets aren't "extra"
+        extra_targets -= targets_by_buildfile[buildfile_path]
+
+        extra_targets_by_buildfile[buildfile_path] = extra_targets
+
+    # add as many extra targets as we can according to max_extra_targets.
+    # note that which extra targets we add is arbitrary since it depends on the
+    # iteration order of the dict and the target_set.
+    remaining_extra_targets = ctx.cli_args.max_extra_targets
+    for buildfile_path, extra_targets in extra_targets_by_buildfile.items():
+        extra_targets = utarget_set(list(extra_targets)[:remaining_extra_targets])
+        targets_by_buildfile[buildfile_path] += extra_targets
+
+        remaining_extra_targets -= len(extra_targets)
+        if remaining_extra_targets <= 0:
+            break
+
+    # output just the target labels by buildfile
+    out = {}
+    for buildfile_path, targets in targets_by_buildfile.items():
+        out[buildfile_path] = [target.label for target in targets]
+    ctx.output.print_json(out)
 
-expand_and_resolve = bxl_main(
-    impl = expand_and_resolve_impl,
+# Writes a json file as an artifact and prints the absolute path to that artifact to stdout.
+resolve_targets = bxl_main(
+    impl = resolve_targets_impl,
     cli_args = {
+        "exclude_workspaces": cli_args.bool(default = False),
+        "pretty": cli_args.bool(default = False),
         "targets": cli_args.list(cli_args.target_expr()),
     },
 )
+
+resolve_owning_buildfile = bxl_main(
+    impl = resolve_owning_buildfile_impl,
+    cli_args = {
+        # while buildfiles, files, and targets can all be passed, only files will be used.
+        # this file is driven primarily by rust-project's needs and is a private implementation
+        # detail.
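+        # Illustrative invocation (paths are hypothetical, and the exact flag
+        # spelling depends on how bxl renders the cli_args keys below):
+        #   buck2 bxl prelude//rust/rust-analyzer/resolve_deps.bxl:resolve_owning_buildfile \
+        #       -- --files src/lib.rs --max_extra_targets 32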
+ "buildfiles": cli_args.option(cli_args.list(cli_args.string())), + "files": cli_args.option(cli_args.list(cli_args.string())), + "max_extra_targets": cli_args.int(), + "targets": cli_args.option(cli_args.list(cli_args.target_expr())), + }, +) diff --git a/prelude/rust/rust_binary.bzl b/prelude/rust/rust_binary.bzl index f4cabb9ac6c..91dcf2459a2 100644 --- a/prelude/rust/rust_binary.bzl +++ b/prelude/rust/rust_binary.bzl @@ -26,8 +26,8 @@ load( "LINK_GROUP_MAPPINGS_SUB_TARGET", "LINK_GROUP_MAP_DATABASE_SUB_TARGET", "LinkGroupContext", + "build_shared_libs_for_symlink_tree", "get_link_group_map_json", - "is_link_group_shlib", ) load("@prelude//cxx:linker.bzl", "DUMPBIN_SUB_TARGET", "PDB_SUB_TARGET", "get_dumpbin_providers", "get_pdb_providers") load( @@ -44,11 +44,11 @@ load( "traverse_shared_library_info", ) load("@prelude//os_lookup:defs.bzl", "OsLookup") +load("@prelude//rust/rust-analyzer:provider.bzl", "rust_analyzer_provider") load( "@prelude//tests:re_utils.bzl", "get_re_executors_from_props", ) -load("@prelude//utils:arglike.bzl", "ArgLike") # @unused Used as a type load("@prelude//utils:utils.bzl", "flatten_dict") load("@prelude//test/inject_test_run_info.bzl", "inject_test_run_info") load( @@ -56,10 +56,10 @@ load( "compile_context", "generate_rustdoc", "rust_compile", - "rust_compile_multi", ) load( ":build_params.bzl", + "BuildParams", # @unused Used as a type "Emit", "LinkageLang", "RuleType", @@ -76,17 +76,28 @@ load( "inherited_rust_cxx_link_group_info", "inherited_shared_libs", ) +load(":named_deps.bzl", "write_named_deps_names") +load(":outputs.bzl", "RustcExtraOutputsInfo", "output_as_diag_subtargets") load(":resources.bzl", "rust_attr_resources") -_CompileOutputs = record( - link = field(Artifact), - args = field(ArgLike), - extra_targets = field(list[(str, Artifact)]), - runtime_files = field(list[ArgLike]), - external_debug_info = field(list[TransitiveSetArgsProjection]), - sub_targets = field(dict[str, list[DefaultInfo]]), - dist_info = DistInfo, -) +def _strategy_params( + ctx: AnalysisContext, + compile_ctx: CompileContext) -> dict[LinkStrategy, BuildParams]: + target_os_type = ctx.attrs._target_os_type[OsLookup] + linker_type = compile_ctx.cxx_toolchain_info.linker_info.type + + params = {} + for link_strategy in LinkStrategy: + params[link_strategy] = build_params( + rule = RuleType("binary"), + proc_macro = False, + link_strategy = link_strategy, + lib_output_style = None, + lang = LinkageLang("rust"), + linker_type = linker_type, + target_os_type = target_os_type, + ) + return params def _rust_binary_common( ctx: AnalysisContext, @@ -98,16 +109,7 @@ def _rust_binary_common( simple_crate = attr_simple_crate_for_filenames(ctx) - styles = {} - dwp_target = None - pdb = None - strategy_param = {} # strategy -> param - sub_targets = {} - - specified_link_strategy = LinkStrategy(ctx.attrs.link_style) if ctx.attrs.link_style else DEFAULT_STATIC_LINK_STRATEGY - - target_os_type = ctx.attrs._target_os_type[OsLookup] - linker_type = compile_ctx.cxx_toolchain_info.linker_info.type + link_strategy = LinkStrategy(ctx.attrs.link_style) if ctx.attrs.link_style else DEFAULT_STATIC_LINK_STRATEGY resources = flatten_dict(gather_resources( label = ctx.label, @@ -115,248 +117,287 @@ def _rust_binary_common( deps = cxx_attr_deps(ctx), ).values()) - for link_strategy in LinkStrategy: - # Unlike for libraries, there's no possibility of different link styles - # resulting in the same build params, so no need to deduplicate. 
- params = build_params( - rule = RuleType("binary"), - proc_macro = False, - link_strategy = link_strategy, - lib_output_style = None, - lang = LinkageLang("rust"), - linker_type = linker_type, - target_os_type = target_os_type, - ) - strategy_param[link_strategy] = params - name = link_strategy.value + "/" + output_filename(simple_crate, Emit("link"), params) - output = ctx.actions.declare_output(name) - - # Gather and setup symlink tree of transitive shared library deps. - shared_libs = {} - - rust_cxx_link_group_info = None - link_group_mappings = {} - link_group_libs = {} - link_group_preferred_linkage = {} - labels_to_links_map = {} - filtered_targets = [] - - if enable_link_groups(ctx, link_strategy, specified_link_strategy, is_binary = True): - rust_cxx_link_group_info = inherited_rust_cxx_link_group_info( - ctx, - compile_ctx.dep_ctx, - link_strategy = link_strategy, - ) - link_group_mappings = rust_cxx_link_group_info.link_group_info.mappings - link_group_libs = rust_cxx_link_group_info.link_group_libs - link_group_preferred_linkage = rust_cxx_link_group_info.link_group_preferred_linkage - labels_to_links_map = rust_cxx_link_group_info.labels_to_links_map - filtered_targets = rust_cxx_link_group_info.filtered_targets - - # As per v1, we only setup a shared library symlink tree for the shared - # link style. - # XXX need link tree for dylib crates - shlib_deps = [] - if link_strategy == LinkStrategy("shared") or rust_cxx_link_group_info != None: - shlib_deps = inherited_shared_libs(ctx, compile_ctx.dep_ctx) - - shlib_info = merge_shared_libraries(ctx.actions, deps = shlib_deps) - - link_group_ctx = LinkGroupContext( - link_group_mappings = link_group_mappings, - link_group_libs = link_group_libs, - link_group_preferred_linkage = link_group_preferred_linkage, - labels_to_links_map = labels_to_links_map, - ) + extra_flags = toolchain_info.rustc_binary_flags + (extra_flags or []) - def shlib_filter(_name, shared_lib): - return not rust_cxx_link_group_info or is_link_group_shlib(shared_lib.label, link_group_ctx) + strategy_param = _strategy_params(ctx, compile_ctx) - for soname, shared_lib in traverse_shared_library_info(shlib_info, filter_func = shlib_filter).items(): - shared_libs[soname] = shared_lib.lib + params = strategy_param[link_strategy] + name = output_filename(simple_crate, Emit("link"), params) + output = ctx.actions.declare_output(name) - if rust_cxx_link_group_info: - # When there are no matches for a pattern based link group, - # `link_group_mappings` will not have an entry associated with the lib. 
- for _name, link_group_lib in link_group_libs.items(): - shared_libs.update(link_group_lib.shared_libs) + rust_cxx_link_group_info = None + link_group_mappings = {} + link_group_libs = {} + link_group_preferred_linkage = {} + labels_to_links_map = {} + targets_consumed_by_link_groups = {} + filtered_targets = [] - # link groups shared libraries link args are directly added to the link command, - # we don't have to add them here - executable_args = executable_shared_lib_arguments( + if enable_link_groups(ctx): + rust_cxx_link_group_info = inherited_rust_cxx_link_group_info( ctx, - compile_ctx.cxx_toolchain_info, - output, - shared_libs, + compile_ctx.dep_ctx, + link_strategy = link_strategy, ) + link_group_mappings = rust_cxx_link_group_info.link_group_info.mappings + link_group_libs = rust_cxx_link_group_info.link_group_libs + link_group_preferred_linkage = rust_cxx_link_group_info.link_group_preferred_linkage + labels_to_links_map = rust_cxx_link_group_info.labels_to_links_map + targets_consumed_by_link_groups = rust_cxx_link_group_info.targets_consumed_by_link_groups + filtered_targets = rust_cxx_link_group_info.filtered_targets + + shlib_deps = [] + if link_strategy == LinkStrategy("shared") or rust_cxx_link_group_info != None: + shlib_deps = inherited_shared_libs(ctx, compile_ctx.dep_ctx) + + shlib_info = merge_shared_libraries(ctx.actions, deps = shlib_deps) + + link_group_ctx = LinkGroupContext( + link_group_mappings = link_group_mappings, + link_group_libs = link_group_libs, + link_group_preferred_linkage = link_group_preferred_linkage, + labels_to_links_map = labels_to_links_map, + targets_consumed_by_link_groups = targets_consumed_by_link_groups, + ) - extra_flags = toolchain_info.rustc_binary_flags + (extra_flags or []) - - # Compile rust binary. - link, meta = rust_compile_multi( - ctx = ctx, - compile_ctx = compile_ctx, - emits = [Emit("link"), Emit("metadata")], - params = params, - default_roots = default_roots, - extra_link_args = executable_args.extra_link_args, - predeclared_outputs = {Emit("link"): output}, - extra_flags = extra_flags, - is_binary = True, - allow_cache_upload = allow_cache_upload, - rust_cxx_link_group_info = rust_cxx_link_group_info, - ) + # Gather and setup symlink tree of transitive shared library deps. + shared_libs = build_shared_libs_for_symlink_tree( + use_link_groups = rust_cxx_link_group_info != None, + link_group_ctx = link_group_ctx, + link_strategy = link_strategy, + shared_libraries = traverse_shared_library_info(shlib_info), + extra_shared_libraries = [], + ) - args = cmd_args(link.output).hidden(executable_args.runtime_files) - extra_targets = [("check", meta.output)] + meta.diag.items() - external_debug_info = project_artifacts( - actions = ctx.actions, - tsets = [inherited_external_debug_info( - ctx, - compile_ctx.dep_ctx, - link.dwo_output_directory, - link_strategy, - )], - ) + # link groups shared libraries link args are directly added to the link command, + # we don't have to add them here + executable_args = executable_shared_lib_arguments( + ctx, + compile_ctx.cxx_toolchain_info, + output, + shared_libs, + ) - # If we have some resources, write it to the resources JSON file and add - # it and all resources to "runtime_files" so that we make to materialize - # them with the final binary. 
-    runtime_files = list(executable_args.runtime_files)
-    if resources:
-        resources_hidden = [create_resource_db(
-            ctx = ctx,
-            name = name + ".resources.json",
-            binary = output,
-            resources = resources,
-        )]
-        for resource in resources.values():
-            resources_hidden.append(resource.default_output)
-            resources_hidden.extend(resource.other_outputs)
-        args.hidden(resources_hidden)
-        runtime_files.extend(resources_hidden)
+    # Compile rust binary.
+    link = rust_compile(
+        ctx = ctx,
+        compile_ctx = compile_ctx,
+        emit = Emit("link"),
+        params = params,
+        default_roots = default_roots,
+        extra_link_args = executable_args.extra_link_args,
+        predeclared_output = output,
+        extra_flags = extra_flags,
+        allow_cache_upload = allow_cache_upload,
+        rust_cxx_link_group_info = rust_cxx_link_group_info,
+        incremental_enabled = ctx.attrs.incremental_enabled,
+    )

-        sub_targets_for_link_strategy = {}
+    args = cmd_args(link.output, hidden = executable_args.runtime_files)
+    external_debug_info = project_artifacts(
+        actions = ctx.actions,
+        tsets = [inherited_external_debug_info(
+            ctx,
+            compile_ctx.dep_ctx,
+            link.dwo_output_directory,
+            link_strategy,
+        )],
+    )

-        sub_targets_for_link_strategy["shared-libraries"] = [DefaultInfo(
-            default_output = ctx.actions.write_json(
-                name + ".shared-libraries.json",
-                {
-                    "libraries": ["{}:{}[shared-libraries][{}]".format(ctx.label.path, ctx.label.name, name) for name in shared_libs.keys()],
-                    "librariesdwp": ["{}:{}[shared-libraries][{}][dwp]".format(ctx.label.path, ctx.label.name, name) for name, lib in shared_libs.items() if lib.dwp],
-                    "rpathtree": ["{}:{}[rpath-tree]".format(ctx.label.path, ctx.label.name)] if executable_args.shared_libs_symlink_tree else [],
-                },
-            ),
-            sub_targets = {
-                name: [DefaultInfo(
-                    default_output = lib.output,
-                    sub_targets = {"dwp": [DefaultInfo(default_output = lib.dwp)]} if lib.dwp else {},
-                )]
-                for name, lib in shared_libs.items()
-            },
+    # If we have some resources, write it to the resources JSON file and add
+    # it and all resources to "runtime_files" so that we make sure to materialize
+    # them with the final binary.
+    runtime_files = list(executable_args.runtime_files)
+    if resources:
+        resources_hidden = [create_resource_db(
+            ctx = ctx,
+            name = name + ".resources.json",
+            binary = output,
+            resources = resources,
         )]
+        for resource in resources.values():
+            resources_hidden.append(resource.default_output)
+            resources_hidden.extend(resource.other_outputs)
+        args.add(cmd_args(hidden = resources_hidden))
+        runtime_files.extend(resources_hidden)
+
+    # A simple dict of sub-target key to artifact, which we'll convert to
+    # DefaultInfo providers at the end
+    extra_compiled_targets = {
+        "sources": compile_ctx.symlinked_srcs,
+    }
     sub_targets = {}

-        if isinstance(executable_args.shared_libs_symlink_tree, Artifact):
-            sub_targets_for_link_strategy["rpath-tree"] = [DefaultInfo(
-                default_output = executable_args.shared_libs_symlink_tree,
-                other_outputs = [
-                    lib.output
-                    for lib in shared_libs.values()
-                ] + [
-                    lib.dwp
-                    for lib in shared_libs.values()
-                    if lib.dwp
+    # TODO(agallagher): There appear to be pre-existing soname conflicts
+    # when building this (when using link groups), which prevents using
+    # `with_unique_str_sonames`.
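+    # Index the shared libs by string soname so they can be exposed as named
+    # sub-targets below; entries without a string soname are skipped.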
+ str_soname_shlibs = { + shlib.soname.ensure_str(): shlib + for shlib in shared_libs + if shlib.soname.is_str + } + sub_targets["shared-libraries"] = [DefaultInfo( + default_output = ctx.actions.write_json( + name + ".shared-libraries.json", + { + "libraries": [ + "{}:{}[shared-libraries][{}]".format(ctx.label.path, ctx.label.name, soname) + for soname in str_soname_shlibs ], + "librariesdwp": [ + "{}:{}[shared-libraries][{}][dwp]".format(ctx.label.path, ctx.label.name, soname) + for soname, shlib in str_soname_shlibs.items() + if shlib.lib.dwp + ], + "rpathtree": ["{}:{}[rpath-tree]".format(ctx.label.path, ctx.label.name)] if executable_args.shared_libs_symlink_tree else [], + }, + ), + sub_targets = { + soname: [DefaultInfo( + default_output = shlib.lib.output, + sub_targets = {"dwp": [DefaultInfo(default_output = shlib.lib.dwp)]} if shlib.lib.dwp else {}, )] + for soname, shlib in str_soname_shlibs.items() + }, + )] + + if isinstance(executable_args.shared_libs_symlink_tree, Artifact): + sub_targets["rpath-tree"] = [DefaultInfo( + default_output = executable_args.shared_libs_symlink_tree, + other_outputs = [ + shlib.lib.output + for shlib in shared_libs + ] + [ + shlib.lib.dwp + for shlib in shared_libs + if shlib.lib.dwp + ], + )] - if rust_cxx_link_group_info: - sub_targets_for_link_strategy[LINK_GROUP_MAP_DATABASE_SUB_TARGET] = [get_link_group_map_json(ctx, filtered_targets)] - readable_mappings = {} - for node, group in link_group_mappings.items(): - readable_mappings[group] = readable_mappings.get(group, []) + ["{}//{}:{}".format(node.cell, node.package, node.name)] - sub_targets_for_link_strategy[LINK_GROUP_MAPPINGS_SUB_TARGET] = [DefaultInfo( - default_output = ctx.actions.write_json( - name + LINK_GROUP_MAPPINGS_FILENAME_SUFFIX, - readable_mappings, - ), - )] - - styles[link_strategy] = _CompileOutputs( - link = link.output, - args = args, - extra_targets = extra_targets, - runtime_files = runtime_files, - external_debug_info = executable_args.external_debug_info + external_debug_info, - sub_targets = sub_targets_for_link_strategy, - dist_info = DistInfo( - shared_libs = shlib_info.set, - nondebug_runtime_files = runtime_files, + if rust_cxx_link_group_info: + sub_targets[LINK_GROUP_MAP_DATABASE_SUB_TARGET] = [get_link_group_map_json(ctx, filtered_targets)] + readable_mappings = {} + for node, group in link_group_mappings.items(): + readable_mappings[group] = readable_mappings.get(group, []) + ["{}//{}:{}".format(node.cell, node.package, node.name)] + sub_targets[LINK_GROUP_MAPPINGS_SUB_TARGET] = [DefaultInfo( + default_output = ctx.actions.write_json( + name + LINK_GROUP_MAPPINGS_FILENAME_SUFFIX, + readable_mappings, ), + )] + + # `infallible_diagnostics` allows us to circumvent compilation failures and + # treat the resulting rustc action as a success, even if a metadata + # artifact was not generated. This allows us to generate diagnostics + # even when the target has bugs. 
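+    # Both incremental and non-incremental variants are built up front so the
+    # RustcExtraOutputsInfo provider below can expose all four artifacts (the
+    # check BXL script earlier in this diff picks one of them via use_clippy).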
+ diag_artifacts = {} + clippy_artifacts = {} + for incr in (True, False): + diag_artifacts[incr] = rust_compile( + ctx = ctx, + compile_ctx = compile_ctx, + emit = Emit("metadata-fast"), + params = strategy_param[DEFAULT_STATIC_LINK_STRATEGY], + default_roots = default_roots, + extra_flags = extra_flags, + infallible_diagnostics = True, + incremental_enabled = incr, + ) + clippy_artifacts[incr] = rust_compile( + ctx = ctx, + compile_ctx = compile_ctx, + emit = Emit("clippy"), + params = strategy_param[DEFAULT_STATIC_LINK_STRATEGY], + default_roots = default_roots, + extra_flags = extra_flags, + infallible_diagnostics = True, + incremental_enabled = incr, ) - if link_strategy == specified_link_strategy and link.dwp_output: - dwp_target = link.dwp_output - if link_strategy == specified_link_strategy and link.pdb: - pdb = link.pdb + providers = [RustcExtraOutputsInfo( + metadata = diag_artifacts[False], + metadata_incr = diag_artifacts[True], + clippy = clippy_artifacts[False], + clippy_incr = clippy_artifacts[True], + )] - expand = rust_compile( + incr_enabled = ctx.attrs.incremental_enabled + extra_compiled_targets.update(output_as_diag_subtargets(diag_artifacts[incr_enabled], clippy_artifacts[incr_enabled])) + + extra_compiled_targets["expand"] = rust_compile( ctx = ctx, compile_ctx = compile_ctx, emit = Emit("expand"), params = strategy_param[DEFAULT_STATIC_LINK_STRATEGY], default_roots = default_roots, extra_flags = extra_flags, + incremental_enabled = ctx.attrs.incremental_enabled, + ).output + + extra_compiled_targets["llvm_ir"] = rust_compile( + ctx = ctx, + compile_ctx = compile_ctx, + emit = Emit("llvm-ir"), + params = params, + default_roots = default_roots, + extra_flags = extra_flags, + incremental_enabled = ctx.attrs.incremental_enabled, + ).output + + doc_output = generate_rustdoc( + ctx = ctx, + compile_ctx = compile_ctx, + params = strategy_param[DEFAULT_STATIC_LINK_STRATEGY], + default_roots = default_roots, + document_private_items = True, ) + extra_compiled_targets["doc"] = doc_output - compiled_outputs = styles[specified_link_strategy] - extra_compiled_targets = (compiled_outputs.extra_targets + [ - ("doc", generate_rustdoc( - ctx = ctx, - compile_ctx = compile_ctx, - params = strategy_param[DEFAULT_STATIC_LINK_STRATEGY], - default_roots = default_roots, - document_private_items = True, - )), - ("expand", expand.output), - ("sources", compile_ctx.symlinked_srcs), - ]) - sub_targets.update({k: [DefaultInfo(default_output = v)] for k, v in extra_compiled_targets}) - sub_targets.update(compiled_outputs.sub_targets) - for (k, sub_compiled_outputs) in styles.items(): - sub_targets[k.value] = [ - DefaultInfo( - default_output = sub_compiled_outputs.link, - other_outputs = sub_compiled_outputs.runtime_files + sub_compiled_outputs.external_debug_info, - # Check/save-analysis for each link style? 
- sub_targets = sub_compiled_outputs.sub_targets, - ), - RunInfo(args = sub_compiled_outputs.args), - sub_compiled_outputs.dist_info, - ] + named_deps_names = write_named_deps_names(ctx, compile_ctx) + if named_deps_names: + extra_compiled_targets["named_deps"] = named_deps_names - if dwp_target: + if link.dwp_output: sub_targets["dwp"] = [ DefaultInfo( - default_output = dwp_target, + default_output = link.dwp_output, + other_outputs = [ + shlib.lib.dwp + for shlib in shared_libs + if shlib.lib.dwp + ], ), ] - if pdb: - sub_targets[PDB_SUB_TARGET] = get_pdb_providers(pdb = pdb, binary = compiled_outputs.link) + if link.pdb: + sub_targets[PDB_SUB_TARGET] = get_pdb_providers(pdb = link.pdb, binary = link.output) dupmbin_toolchain = compile_ctx.cxx_toolchain_info.dumpbin_toolchain_path if dupmbin_toolchain: - sub_targets[DUMPBIN_SUB_TARGET] = get_dumpbin_providers(ctx, compiled_outputs.link, dupmbin_toolchain) + sub_targets[DUMPBIN_SUB_TARGET] = get_dumpbin_providers(ctx, link.output, dupmbin_toolchain) + + sub_targets.update({ + k: [DefaultInfo(default_output = v)] + for (k, v) in extra_compiled_targets.items() + }) - providers = [ + providers += [ DefaultInfo( - default_output = compiled_outputs.link, - other_outputs = compiled_outputs.runtime_files + compiled_outputs.external_debug_info, + default_output = link.output, + other_outputs = runtime_files + executable_args.external_debug_info + external_debug_info, sub_targets = sub_targets, ), - compiled_outputs.dist_info, + DistInfo( + shared_libs = shlib_info.set, + nondebug_runtime_files = runtime_files, + ), ] - return (providers, compiled_outputs.args) + providers.append(rust_analyzer_provider( + ctx = ctx, + compile_ctx = compile_ctx, + default_roots = default_roots, + )) + return (providers, args) def rust_binary_impl(ctx: AnalysisContext) -> list[Provider]: compile_ctx = compile_context(ctx) @@ -382,7 +423,10 @@ def rust_test_impl(ctx: AnalysisContext) -> list[Provider]: providers, args = _rust_binary_common( ctx = ctx, compile_ctx = compile_ctx, - default_roots = ["main.rs", "lib.rs"], + # Unless default_roots are provided, it is ambiguous whether this test rule is invoked + # to test a binary, or to test a library. As such, we must consider both main.rs and + # lib.rs as potential candidates. 
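+        # (A target can remove the ambiguity by setting `default_roots` explicitly.)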
+ default_roots = ctx.attrs.default_roots or ["main.rs", "lib.rs"], extra_flags = extra_flags, allow_cache_upload = False, ) @@ -395,7 +439,7 @@ def rust_test_impl(ctx: AnalysisContext) -> list[Provider]: ExternalRunnerTestInfo( type = "rust", command = [args], - env = ctx.attrs.env, + env = ctx.attrs.env | ctx.attrs.run_env, labels = ctx.attrs.labels, contacts = ctx.attrs.contacts, default_executor = re_executor, diff --git a/prelude/rust/rust_library.bzl b/prelude/rust/rust_library.bzl index e0a03090cd7..c9a714e2615 100644 --- a/prelude/rust/rust_library.bzl +++ b/prelude/rust/rust_library.bzl @@ -15,15 +15,9 @@ load( "@prelude//android:android_providers.bzl", "merge_android_packageable_info", ) -load( - "@prelude//cxx:cxx_context.bzl", - "get_cxx_toolchain_info", -) -load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo") load( "@prelude//cxx:linker.bzl", "PDB_SUB_TARGET", - "get_default_shared_library_name", "get_pdb_providers", ) load( @@ -42,14 +36,13 @@ load( "LinkInfo", "LinkInfos", "LinkStrategy", - "Linkage", "LinkedObject", "MergedLinkInfo", # @unused Used as a type "SharedLibLinkable", "create_merged_link_info", - "create_merged_link_info_for_propagation", "get_lib_output_style", "legacy_output_style_to_link_style", + "set_link_info_link_whole", ) load( "@prelude//linking:linkable_graph.bzl", @@ -65,24 +58,24 @@ load( "create_shared_libraries", "merge_shared_libraries", ) -load("@prelude//linking:strip.bzl", "strip_debug_info") +load("@prelude//linking:types.bzl", "Linkage") load("@prelude//os_lookup:defs.bzl", "OsLookup") +load("@prelude//rust/rust-analyzer:provider.bzl", "rust_analyzer_provider") +load("@prelude//unix:providers.bzl", "UnixEnv", "create_unix_env_info") load( ":build.bzl", - "RustcOutput", # @unused Used as a type "compile_context", "generate_rustdoc", "generate_rustdoc_coverage", "generate_rustdoc_test", "rust_compile", - "rust_compile_multi", ) load( ":build_params.bzl", "BuildParams", # @unused Used as a type - "CrateType", "Emit", "LinkageLang", + "MetadataKind", "RuleType", "build_params", ) @@ -94,11 +87,13 @@ load( ) load( ":link_info.bzl", + "DEFAULT_STATIC_LIB_OUTPUT_STYLE", "DEFAULT_STATIC_LINK_STRATEGY", "RustLinkInfo", "RustLinkStrategyInfo", "RustProcMacroMarker", # @unused Used as a type "attr_crate", + "attr_soname", "inherited_exported_link_deps", "inherited_link_group_lib_infos", "inherited_linkable_graphs", @@ -108,97 +103,19 @@ load( "resolve_rust_deps", "strategy_info", ) +load(":named_deps.bzl", "write_named_deps_names") +load( + ":outputs.bzl", + "RustcExtraOutputsInfo", + "RustcOutput", # @unused Used as a type + "output_as_diag_subtargets", +) load(":proc_macro_alias.bzl", "rust_proc_macro_alias") load(":resources.bzl", "rust_attr_resources") load(":rust_toolchain.bzl", "RustToolchainInfo") load(":targets.bzl", "targets") -def prebuilt_rust_library_impl(ctx: AnalysisContext) -> list[Provider]: - providers = [] - - # Default output. 
- providers.append( - DefaultInfo( - default_output = ctx.attrs.rlib, - ), - ) - - rust_toolchain = ctx.attrs._rust_toolchain[RustToolchainInfo] - dep_ctx = DepCollectionContext( - advanced_unstable_linking = rust_toolchain.advanced_unstable_linking, - include_doc_deps = False, - is_proc_macro = False, - explicit_sysroot_deps = rust_toolchain.explicit_sysroot_deps, - panic_runtime = rust_toolchain.panic_runtime, - ) - - cxx_toolchain = get_cxx_toolchain_info(ctx) - linker_info = cxx_toolchain.linker_info - - archive_info = LinkInfos( - default = LinkInfo( - linkables = [ - ArchiveLinkable( - archive = Archive(artifact = ctx.attrs.rlib), - linker_type = linker_info.type, - ), - ], - ), - stripped = LinkInfo( - linkables = [ - ArchiveLinkable( - archive = Archive( - artifact = strip_debug_info( - ctx = ctx, - out = ctx.attrs.rlib.short_path, - obj = ctx.attrs.rlib, - ), - ), - linker_type = linker_info.type, - ), - ], - ), - ) - link_infos = {LibOutputStyle("archive"): archive_info, LibOutputStyle("pic_archive"): archive_info} - - # Rust link provider. - crate = attr_crate(ctx) - strategies = {} - for link_strategy in LinkStrategy: - tdeps, tmetadeps, external_debug_info, tprocmacrodeps = _compute_transitive_deps(ctx, dep_ctx, link_strategy) - external_debug_info = make_artifact_tset( - actions = ctx.actions, - children = external_debug_info, - ) - strategies[link_strategy] = RustLinkStrategyInfo( - rlib = ctx.attrs.rlib, - transitive_deps = tdeps, - rmeta = ctx.attrs.rlib, - transitive_rmeta_deps = tmetadeps, - transitive_proc_macro_deps = tprocmacrodeps, - pdb = None, - external_debug_info = external_debug_info, - ) - - merged_link_info, shared_libs, inherited_graphs, inherited_link_deps = _rust_link_providers( - ctx, - dep_ctx, - cxx_toolchain, - link_infos, - Linkage(ctx.attrs.preferred_linkage), - ) - providers.append( - RustLinkInfo( - crate = crate, - strategies = strategies, - exported_link_deps = inherited_link_deps, - merged_link_info = merged_link_info, - shared_libs = shared_libs, - linkable_graphs = inherited_graphs, - ), - ) - - return providers +_DEFAULT_ROOTS = ["lib.rs"] def rust_library_impl(ctx: AnalysisContext) -> list[Provider]: compile_ctx = compile_context(ctx) @@ -209,25 +126,78 @@ def rust_library_impl(ctx: AnalysisContext) -> list[Provider]: # distinct kinds of build we actually need to deal with. param_lang, lang_style_param = _build_params_for_styles(ctx, compile_ctx) - artifacts = _build_library_artifacts(ctx, compile_ctx, param_lang.keys()) + # Grab the artifacts to use for the check subtargets. Picking a good + # `LibOutputStyle` ensures that the subtarget shares work with the main + # build if possible + meta_params = lang_style_param[(LinkageLang("rust"), DEFAULT_STATIC_LIB_OUTPUT_STYLE)] + + meta_fast = rust_compile( + ctx = ctx, + compile_ctx = compile_ctx, + emit = Emit("metadata-fast"), + params = meta_params, + default_roots = _DEFAULT_ROOTS, + incremental_enabled = ctx.attrs.incremental_enabled, + ) + # Generate the actions to build various output artifacts. Given the set of + # parameters we need, populate maps to the linkable and metadata + # artifacts by linkage lang. rust_param_artifact = {} + rust_param_subtargets = {} native_param_artifact = {} - check_artifacts = None - - for params, (link, meta) in artifacts.items(): - if LinkageLang("rust") in param_lang[params]: - # Grab the check output for all kinds of builds to use - # in the check subtarget. The link style doesn't matter - # so pick the first. 
- if check_artifacts == None: - check_artifacts = {"check": meta.output} - check_artifacts.update(meta.diag) - - rust_param_artifact[params] = (link, meta) - if LinkageLang("native") in param_lang[params] or LinkageLang("native-unbundled") in param_lang[params]: + for params, langs in param_lang.items(): + link = rust_compile( + ctx = ctx, + compile_ctx = compile_ctx, + emit = Emit("link"), + params = params, + default_roots = _DEFAULT_ROOTS, + incremental_enabled = ctx.attrs.incremental_enabled, + ) + + if LinkageLang("rust") in langs: + rust_param_artifact[params] = { + MetadataKind("link"): link, + MetadataKind("full"): rust_compile( + ctx = ctx, + compile_ctx = compile_ctx, + emit = Emit("metadata-full"), + params = params, + default_roots = _DEFAULT_ROOTS, + incremental_enabled = ctx.attrs.incremental_enabled, + ), + MetadataKind("fast"): meta_fast, + } + + rust_param_subtargets[params] = { + "llvm-ir": rust_compile( + ctx = ctx, + compile_ctx = compile_ctx, + emit = Emit("llvm-ir"), + params = params, + default_roots = _DEFAULT_ROOTS, + incremental_enabled = ctx.attrs.incremental_enabled, + ), + } + + if LinkageLang("native") in langs or LinkageLang("native-unbundled") in langs: native_param_artifact[params] = link + rust_artifacts = _rust_artifacts( + ctx = ctx, + compile_ctx = compile_ctx, + lang_style_param = lang_style_param, + rust_param_artifact = rust_param_artifact, + ) + + link_infos = _link_infos( + ctx = ctx, + compile_ctx = compile_ctx, + lang_style_param = lang_style_param, + param_artifact = native_param_artifact, + ) + # For doctests, we need to know two things to know how to link them. The # first is that we need a link strategy, which affects how deps of this # target are handled @@ -250,23 +220,22 @@ def rust_library_impl(ctx: AnalysisContext) -> list[Provider]: # being built in a "shared" way well, so this must be a static output style. if ctx.attrs.doc_link_style: doc_output_style = { - "shared": LibOutputStyle("pic_archive"), + "shared": DEFAULT_STATIC_LIB_OUTPUT_STYLE, "static": LibOutputStyle("archive"), "static_pic": LibOutputStyle("pic_archive"), }[ctx.attrs.doc_link_style] else: - doc_output_style = LibOutputStyle("pic_archive") + doc_output_style = DEFAULT_STATIC_LIB_OUTPUT_STYLE static_library_params = lang_style_param[(LinkageLang("rust"), doc_output_style)] # Among {rustdoc, doctests, macro expand}, doctests are the only one which # cares about linkage. So whatever build params we picked for the doctests, # reuse them for the other two as well - default_roots = ["lib.rs"] rustdoc = generate_rustdoc( ctx = ctx, compile_ctx = compile_ctx, params = static_library_params, - default_roots = default_roots, + default_roots = _DEFAULT_ROOTS, document_private_items = False, ) @@ -274,7 +243,7 @@ def rust_library_impl(ctx: AnalysisContext) -> list[Provider]: ctx = ctx, compile_ctx = compile_ctx, params = static_library_params, - default_roots = default_roots, + default_roots = _DEFAULT_ROOTS, ) expand = rust_compile( @@ -282,7 +251,13 @@ def rust_library_impl(ctx: AnalysisContext) -> list[Provider]: compile_ctx = compile_ctx, emit = Emit("expand"), params = static_library_params, - default_roots = default_roots, + default_roots = _DEFAULT_ROOTS, + # This is needed as rustc can generate expanded sources that do not + # fully compile, but will report an error even if it succeeds. + # TODO(pickett): Handle this at the rustc action level, we shouldn't + # need to pass a special arg here, expand should just work. 
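+        # (The same flag is reused for the metadata-fast/clippy diagnostic compiles
+        # below, where it likewise turns failing rustc actions into successes.)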
+ infallible_diagnostics = True, + incremental_enabled = ctx.attrs.incremental_enabled, ) # If doctests=True or False is set on the individual target, respect that. @@ -303,49 +278,83 @@ def rust_library_impl(ctx: AnalysisContext) -> list[Provider]: rustdoc_test = generate_rustdoc_test( ctx = ctx, compile_ctx = compile_ctx, - link_strategy = rustdoc_test_params.dep_link_strategy, - rlib = rust_param_artifact[static_library_params][0].output, + rlib = rust_param_artifact[static_library_params][MetadataKind("link")].output, + link_infos = link_infos, params = rustdoc_test_params, - default_roots = default_roots, + default_roots = _DEFAULT_ROOTS, ) - providers = [] - - link_infos = _link_infos( - ctx = ctx, - compile_ctx = compile_ctx, - lang_style_param = lang_style_param, - param_artifact = native_param_artifact, - ) + # infallible_diagnostics allows us to circumvent compilation failures and + # treat the resulting rustc action as a success, even if a metadata + # artifact was not generated. This allows us to generate diagnostics + # even when the target has bugs. + diag_artifacts = {} + clippy_artifacts = {} + for incr in (True, False): + diag_artifacts[incr] = rust_compile( + ctx = ctx, + compile_ctx = compile_ctx, + emit = Emit("metadata-fast"), + params = meta_params, + default_roots = _DEFAULT_ROOTS, + infallible_diagnostics = True, + incremental_enabled = incr, + ) + clippy_artifacts[incr] = rust_compile( + ctx = ctx, + compile_ctx = compile_ctx, + emit = Emit("clippy"), + params = meta_params, + default_roots = _DEFAULT_ROOTS, + infallible_diagnostics = True, + incremental_enabled = incr, + ) + incr_enabled = ctx.attrs.incremental_enabled + providers = [] providers += _default_providers( lang_style_param = lang_style_param, - param_artifact = rust_param_artifact, + rust_param_artifact = rust_param_artifact, + rust_param_subtargets = rust_param_subtargets, + native_param_artifact = native_param_artifact, rustdoc = rustdoc, rustdoc_test = rustdoc_test, doctests_enabled = doctests_enabled, - check_artifacts = check_artifacts, + check_artifacts = output_as_diag_subtargets(diag_artifacts[incr_enabled], clippy_artifacts[incr_enabled]), expand = expand.output, sources = compile_ctx.symlinked_srcs, rustdoc_coverage = rustdoc_coverage, + named_deps_names = write_named_deps_names(ctx, compile_ctx), ) - rust_link_info = _rust_providers( - ctx = ctx, - compile_ctx = compile_ctx, - lang_style_param = lang_style_param, - param_artifact = rust_param_artifact, - link_infos = link_infos, - ) - providers.append(rust_link_info) - providers += _native_providers( - ctx = ctx, - compile_ctx = compile_ctx, - lang_style_param = lang_style_param, - param_artifact = native_param_artifact, - link_infos = link_infos, - rust_link_info = rust_link_info, + providers += _rust_metadata_providers( + diag_artifacts = diag_artifacts, + clippy_artifacts = clippy_artifacts, ) + if ctx.attrs.proc_macro: + providers += _proc_macro_link_providers( + ctx = ctx, + rust_artifacts = rust_artifacts, + ) + elif toolchain_info.advanced_unstable_linking: + providers += _advanced_unstable_link_providers( + ctx = ctx, + compile_ctx = compile_ctx, + lang_style_param = lang_style_param, + rust_artifacts = rust_artifacts, + native_param_artifact = native_param_artifact, + link_infos = link_infos, + ) + else: + providers += _stable_link_providers( + ctx = ctx, + compile_ctx = compile_ctx, + lang_style_param = lang_style_param, + rust_artifacts = rust_artifacts, + native_param_artifact = native_param_artifact, + link_infos = link_infos, + 
) + deps = [dep.dep for dep in resolve_deps(ctx, compile_ctx.dep_ctx)] providers.append(ResourceInfo(resources = gather_resources( label = ctx.label, @@ -355,6 +364,12 @@ def rust_library_impl(ctx: AnalysisContext) -> list[Provider]: providers.append(merge_android_packageable_info(ctx.label, ctx.actions, deps)) + providers.append(rust_analyzer_provider( + ctx = ctx, + compile_ctx = compile_ctx, + default_roots = _DEFAULT_ROOTS, + )) + return providers def _build_params_for_styles( @@ -404,95 +419,138 @@ def _build_params_for_styles( return (param_lang, style_param) -def _build_library_artifacts( +def _link_infos( ctx: AnalysisContext, compile_ctx: CompileContext, - params: list[BuildParams]) -> dict[BuildParams, (RustcOutput, RustcOutput)]: - """ - Generate the actual actions to build various output artifacts. Given the set - parameters we need, return a mapping to the linkable and metadata artifacts. - """ - param_artifact = {} + lang_style_param: dict[(LinkageLang, LibOutputStyle), BuildParams], + param_artifact: dict[BuildParams, RustcOutput]) -> dict[LibOutputStyle, LinkInfos]: + if ctx.attrs.proc_macro: + # Don't need any of this for proc macros + return {} - for params in params: - # Separate actions for each emit type - # - # In principle we don't really need metadata for C++-only artifacts, but I don't think it hurts - link, meta = rust_compile_multi( - ctx = ctx, - compile_ctx = compile_ctx, - emits = [Emit("link"), Emit("metadata")], - params = params, - default_roots = ["lib.rs"], + advanced_unstable_linking = compile_ctx.toolchain_info.advanced_unstable_linking + lang = LinkageLang("native-unbundled") if advanced_unstable_linking else LinkageLang("native") + linker_type = compile_ctx.cxx_toolchain_info.linker_info.type + + link_infos = {} + for output_style in LibOutputStyle: + lib = param_artifact[lang_style_param[(lang, output_style)]] + external_debug_info = make_artifact_tset( + actions = ctx.actions, + label = ctx.label, + artifacts = filter(None, [lib.dwo_output_directory]), + children = lib.extra_external_debug_info, ) + if output_style == LibOutputStyle("shared_lib"): + link_infos[output_style] = LinkInfos( + default = LinkInfo( + linkables = [SharedLibLinkable(lib = lib.output)], + external_debug_info = external_debug_info, + pre_flags = ctx.attrs.exported_linker_flags, + post_flags = ctx.attrs.exported_post_linker_flags, + ), + stripped = LinkInfo( + linkables = [SharedLibLinkable(lib = lib.stripped_output)], + external_debug_info = external_debug_info, + pre_flags = ctx.attrs.exported_linker_flags, + post_flags = ctx.attrs.exported_post_linker_flags, + ), + ) + else: + link_infos[output_style] = LinkInfos( + default = LinkInfo( + linkables = [ArchiveLinkable( + archive = Archive(artifact = lib.output), + linker_type = linker_type, + )], + external_debug_info = external_debug_info, + pre_flags = ctx.attrs.exported_linker_flags, + post_flags = ctx.attrs.exported_post_linker_flags, + ), + stripped = LinkInfo( + linkables = [ArchiveLinkable( + archive = Archive(artifact = lib.stripped_output), + linker_type = linker_type, + )], + pre_flags = ctx.attrs.exported_linker_flags, + post_flags = ctx.attrs.exported_post_linker_flags, + ), + ) + return link_infos - param_artifact[params] = (link, meta) +def _rust_artifacts( + ctx: AnalysisContext, + compile_ctx: CompileContext, + lang_style_param: dict[(LinkageLang, LibOutputStyle), BuildParams], + rust_param_artifact: dict[BuildParams, dict[MetadataKind, RustcOutput]]) -> dict[LinkStrategy, RustLinkStrategyInfo]: + pic_behavior 
= compile_ctx.cxx_toolchain_info.pic_behavior + preferred_linkage = Linkage(ctx.attrs.preferred_linkage) - return param_artifact + rust_artifacts = {} + for link_strategy in LinkStrategy: + params = lang_style_param[(LinkageLang("rust"), get_lib_output_style(link_strategy, preferred_linkage, pic_behavior))] + rust_artifacts[link_strategy] = _handle_rust_artifact(ctx, compile_ctx.dep_ctx, link_strategy, rust_param_artifact[params]) + return rust_artifacts def _handle_rust_artifact( ctx: AnalysisContext, dep_ctx: DepCollectionContext, - crate_type: CrateType, link_strategy: LinkStrategy, - link: RustcOutput, - meta: RustcOutput) -> RustLinkStrategyInfo: + outputs: dict[MetadataKind, RustcOutput]) -> RustLinkStrategyInfo: """ - Return the RustLinkInfo for a given set of artifacts. The main consideration + Return the RustLinkStrategyInfo for a given set of artifacts. The main consideration is computing the right set of dependencies. """ # If we're a crate where our consumers should care about transitive deps, # then compute them (specifically, not proc-macro). - if crate_type != CrateType("proc-macro"): - tdeps, tmetadeps, external_debug_info, tprocmacrodeps = _compute_transitive_deps(ctx, dep_ctx, link_strategy) - else: - tdeps, tmetadeps, external_debug_info, tprocmacrodeps = {}, {}, [], {} - + link_output = outputs[MetadataKind("link")] if not ctx.attrs.proc_macro: + tdeps, external_debug_info, tprocmacrodeps = _compute_transitive_deps(ctx, dep_ctx, link_strategy) external_debug_info = make_artifact_tset( actions = ctx.actions, label = ctx.label, - artifacts = filter(None, [link.dwo_output_directory]), + artifacts = filter(None, [link_output.dwo_output_directory]), children = external_debug_info, ) return RustLinkStrategyInfo( - rlib = link.output, + outputs = {m: x.output for m, x in outputs.items()}, transitive_deps = tdeps, - rmeta = meta.output, - transitive_rmeta_deps = tmetadeps, transitive_proc_macro_deps = tprocmacrodeps, - pdb = link.pdb, + pdb = link_output.pdb, external_debug_info = external_debug_info, ) else: # Proc macro deps are always the real thing return RustLinkStrategyInfo( - rlib = link.output, - transitive_deps = tdeps, - rmeta = link.output, - transitive_rmeta_deps = tdeps, - transitive_proc_macro_deps = tprocmacrodeps, - pdb = link.pdb, + outputs = {m: link_output.output for m in MetadataKind}, + transitive_deps = {m: {} for m in MetadataKind}, + transitive_proc_macro_deps = {}, + pdb = link_output.pdb, external_debug_info = ArtifactTSet(), ) def _default_providers( lang_style_param: dict[(LinkageLang, LibOutputStyle), BuildParams], - param_artifact: dict[BuildParams, (RustcOutput, RustcOutput)], + rust_param_artifact: dict[BuildParams, dict[MetadataKind, RustcOutput]], + native_param_artifact: dict[BuildParams, RustcOutput], + rust_param_subtargets: dict[BuildParams, dict[str, RustcOutput]], rustdoc: Artifact, rustdoc_test: cmd_args, doctests_enabled: bool, - check_artifacts: dict[str, Artifact], + check_artifacts: dict[str, Artifact | None], expand: Artifact, sources: Artifact, - rustdoc_coverage: Artifact) -> list[Provider]: + rustdoc_coverage: Artifact, + named_deps_names: Artifact | None) -> list[Provider]: targets = {} targets.update(check_artifacts) targets["sources"] = sources targets["expand"] = expand targets["doc"] = rustdoc targets["doc-coverage"] = rustdoc_coverage + if named_deps_names: + targets["named_deps"] = named_deps_names sub_targets = { k: [DefaultInfo(default_output = v)] for (k, v) in targets.items() @@ -503,8 +561,9 @@ def _default_providers( 
# determined by `get_output_styles_for_linkage` in `linking/link_info.bzl`. # Do we want to do the same? for output_style in LibOutputStyle: - link, _ = param_artifact[lang_style_param[(LinkageLang("rust"), output_style)]] - nested_sub_targets = {} + param = lang_style_param[(LinkageLang("rust"), output_style)] + link = rust_param_artifact[param][MetadataKind("link")] + nested_sub_targets = {k: [DefaultInfo(default_output = v.output)] for k, v in rust_param_subtargets[param].items()} if link.pdb: nested_sub_targets[PDB_SUB_TARGET] = get_pdb_providers(pdb = link.pdb, binary = link.output) @@ -517,6 +576,20 @@ def _default_providers( sub_targets = nested_sub_targets, )] + lang_style_for_staticlib = (LinkageLang("native"), LibOutputStyle("archive")) + if lang_style_for_staticlib in lang_style_param: + artifact = native_param_artifact[lang_style_param[lang_style_for_staticlib]] + sub_targets["staticlib"] = [DefaultInfo( + default_output = artifact.output, + )] + + lang_style_for_cdylib = (LinkageLang("native"), LibOutputStyle("shared_lib")) + if lang_style_for_cdylib in lang_style_param: + artifact = native_param_artifact[lang_style_param[lang_style_for_cdylib]] + sub_targets["cdylib"] = [DefaultInfo( + default_output = artifact.output, + )] + providers = [] rustdoc_test_info = ExternalRunnerTestInfo( @@ -539,193 +612,195 @@ def _default_providers( return providers -def _rust_link_providers( +def _rust_metadata_providers(diag_artifacts: dict[bool, RustcOutput], clippy_artifacts: dict[bool, RustcOutput]) -> list[Provider]: + return [ + RustcExtraOutputsInfo( + metadata = diag_artifacts[False], + metadata_incr = diag_artifacts[True], + clippy = clippy_artifacts[False], + clippy_incr = clippy_artifacts[True], + ), + ] + +def _proc_macro_link_providers( ctx: AnalysisContext, - dep_ctx: DepCollectionContext, - cxx_toolchain: CxxToolchainInfo, - link_infos: dict[LibOutputStyle, LinkInfos], - preferred_linkage: Linkage) -> ( - MergedLinkInfo, - SharedLibraryInfo, - list[LinkableGraph], - list[Dependency], -): + rust_artifacts: dict[LinkStrategy, RustLinkStrategyInfo]) -> list[Provider]: # These are never accessed in the case of proc macros, so just return some dummy # values - if ctx.attrs.proc_macro: - return ( - create_merged_link_info_for_propagation(ctx, []), - merge_shared_libraries(ctx.actions), - [], - [], - ) + return [RustLinkInfo( + crate = attr_crate(ctx), + strategies = rust_artifacts, + merged_link_infos = {}, + exported_link_deps = [], + shared_libs = merge_shared_libraries(ctx.actions), + linkable_graphs = [], + )] + +def _advanced_unstable_link_providers( + ctx: AnalysisContext, + compile_ctx: CompileContext, + lang_style_param: dict[(LinkageLang, LibOutputStyle), BuildParams], + rust_artifacts: dict[LinkStrategy, RustLinkStrategyInfo], + native_param_artifact: dict[BuildParams, RustcOutput], + link_infos: dict[LibOutputStyle, LinkInfos]) -> list[Provider]: + crate = attr_crate(ctx) + pic_behavior = compile_ctx.cxx_toolchain_info.pic_behavior + preferred_linkage = Linkage(ctx.attrs.preferred_linkage) + + providers = [] + + dep_ctx = compile_ctx.dep_ctx inherited_link_infos = inherited_merged_link_infos(ctx, dep_ctx) inherited_shlibs = inherited_shared_libs(ctx, dep_ctx) inherited_graphs = inherited_linkable_graphs(ctx, dep_ctx) inherited_exported_deps = inherited_exported_link_deps(ctx, dep_ctx) - if dep_ctx.advanced_unstable_linking: - # We have to produce a version of the providers that are defined in such - # a way that native rules looking at these providers will also pick up - # 
the `FORCE_RLIB` behavior. The general approach to that will be to - # claim that we have `preferred_linkage = "static"`. - # - # Note that all of this code is FORCE_RLIB specific. Disabling that - # setting requires replacing this with the "real" native providers - # - # As an optimization, we never bother reporting exported deps here. - # Whichever dependent uses the providers created here will take care of - # that for us. - merged_link_info = create_merged_link_info( - ctx, - cxx_toolchain.pic_behavior, - link_infos, - deps = inherited_link_infos, - preferred_linkage = Linkage("static"), - ) - shared_libs = merge_shared_libraries( - # We never actually have any shared libraries to add - ctx.actions, - deps = inherited_shlibs, + # Native link provider. + merged_link_info = create_merged_link_info( + ctx, + pic_behavior, + link_infos, + deps = inherited_link_infos.values(), + exported_deps = filter(None, [d.get(MergedLinkInfo) for d in inherited_exported_deps]), + preferred_linkage = preferred_linkage, + ) + providers.append(merged_link_info) + + solibs = {} + + # Add the shared library to the list of shared libs. + shlib_name = attr_soname(ctx) + + shared_lib_params = lang_style_param[(LinkageLang("native-unbundled"), LibOutputStyle("shared_lib"))] + shared_lib_output = native_param_artifact[shared_lib_params].output + + # Only add a shared library if we generated one. + # TODO(cjhopman): This is strange. Normally (like in c++) the link_infos passed to create_merged_link_info above would only have + # a value for LibOutputStyle("shared_lib") if that were created and we could just check for that key. Given that I intend + # to remove the SharedLibraries provider, maybe just wait for that to resolve this. + if get_lib_output_style(LinkStrategy("shared"), preferred_linkage, compile_ctx.cxx_toolchain_info.pic_behavior) == LibOutputStyle("shared_lib"): + solibs[shlib_name] = LinkedObject( + output = shared_lib_output, + unstripped_output = shared_lib_output, + external_debug_info = link_infos[LibOutputStyle("shared_lib")].default.external_debug_info, ) - # The link graph representation is a little bit weird, since instead of - # just building up a graph via tsets, it uses a flat list of labeled - # nodes, each with a list of labels for dependency edges. The node that - # we create here cannot just use this target's label, since that would - # conflict with the node created for the native providers. As a result, - # we make up a fake subtarget to get a distinct label - new_label = ctx.label.configured_target().with_sub_target((ctx.label.sub_target or []) + ["fake_force_rlib_subtarget"]) - linkable_graph = create_linkable_graph( + # Native shared library provider. 
+ shared_libs = create_shared_libraries(ctx, solibs) + shared_library_info = merge_shared_libraries( + ctx.actions, + shared_libs, + inherited_shlibs, + ) + providers.append(shared_library_info) + + linkable_graph = create_linkable_graph( + ctx, + node = create_linkable_graph_node( ctx, - node = create_linkable_graph_node( - ctx, - linkable_node = create_linkable_node( - ctx = ctx, - preferred_linkage = Linkage("static"), - deps = inherited_graphs, - link_infos = link_infos, - # FIXME(JakobDegen): It should be ok to set this to `None`, - # but that breaks arc focus, and setting it to "" breaks - # somerge - default_soname = get_default_shared_library_name(cxx_toolchain.linker_info, ctx.label), - # Link groups have a heuristic in which they assume that a - # preferred_linkage = "static" library needs to be linked - # into every single link group, instead of just one. - # Applying that same heuristic to Rust seems right, but only - # if this target actually requested that. Opt ourselves out - # if it didn't. - ignore_force_static_follows_dependents = preferred_linkage != Linkage("static"), - include_in_android_mergemap = False, # TODO(pickett): Plumb D54748362 to the macro layer - ), - label = new_label, + linkable_node = create_linkable_node( + ctx = ctx, + preferred_linkage = preferred_linkage, + deps = inherited_graphs, + exported_deps = inherited_exported_deps, + link_infos = link_infos, + shared_libs = shared_libs, + default_soname = shlib_name, + # Link groups have a heuristic in which they assume that a + # preferred_linkage = "static" library needs to be linked + # into every single link group, instead of just one. + # Applying that same heuristic to Rust seems right, but only + # if this target actually requested that. Opt ourselves out + # if it didn't. + ignore_force_static_follows_dependents = preferred_linkage != Linkage("static"), + include_in_android_mergemap = False, # TODO(pickett): Plumb D54748362 to the macro layer ), - deps = inherited_graphs, - ) + ), + deps = inherited_graphs + inherited_exported_deps, + ) - # We've already reported transitive deps on the inherited graphs, so for - # most purposes it would be fine to just have `linkable_graph` here. - # However, link groups do an analysis that relies on each symbol - # reference having a matching edge in the link graph, and so reexports - # and generics mean that we have to report a dependency on all - # transitive Rust deps and their immediate non-Rust deps - link_graphs = inherited_graphs + [linkable_graph] - else: - merged_link_info = create_merged_link_info_for_propagation(ctx, inherited_link_infos) - shared_libs = merge_shared_libraries( - ctx.actions, - deps = inherited_shlibs, - ) - link_graphs = inherited_graphs - return (merged_link_info, shared_libs, link_graphs, inherited_exported_deps) + providers.append(linkable_graph) + + # Omnibus root provider. + linkable_root = create_linkable_root( + label = ctx.label, + name = shlib_name, + link_infos = LinkInfos( + default = set_link_info_link_whole(link_infos[LibOutputStyle("pic_archive")].default), + ), + deps = inherited_graphs, + ) + providers.append(linkable_root) + + # Mark libraries that support `dlopen`. 
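+    # (Note: `supports_python_dlopen` is read with `getattr` since not every
+    # rule defines the attribute; the `False` default opts such rules out.)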
+ if getattr(ctx.attrs, "supports_python_dlopen", False): + providers.append(DlopenableLibraryInfo()) + + # We never need to add anything to this provider because Rust libraries + # cannot act as link group libs, especially given that they only support + # auto link groups anyway + providers.append(merge_link_group_lib_info(children = inherited_link_group_lib_infos(ctx, compile_ctx.dep_ctx))) + + # Create rust library provider. + providers.append(RustLinkInfo( + crate = crate, + strategies = rust_artifacts, + merged_link_infos = inherited_link_infos | {ctx.label.configured_target(): merged_link_info}, + exported_link_deps = inherited_exported_deps, + shared_libs = shared_library_info, + linkable_graphs = inherited_graphs + [linkable_graph], + )) + + return providers -def _rust_providers( +def _stable_link_providers( ctx: AnalysisContext, compile_ctx: CompileContext, lang_style_param: dict[(LinkageLang, LibOutputStyle), BuildParams], - param_artifact: dict[BuildParams, (RustcOutput, RustcOutput)], - link_infos: dict[LibOutputStyle, LinkInfos]) -> RustLinkInfo: - """ - Return the set of providers for Rust linkage. - """ - crate = attr_crate(ctx) - - pic_behavior = compile_ctx.cxx_toolchain_info.pic_behavior - preferred_linkage = Linkage(ctx.attrs.preferred_linkage) + native_param_artifact: dict[BuildParams, RustcOutput], + rust_artifacts: dict[LinkStrategy, RustLinkStrategyInfo], + link_infos: dict[LibOutputStyle, LinkInfos]) -> list[Provider]: + providers = [] - strategy_info = {} - for link_strategy in LinkStrategy: - params = lang_style_param[(LinkageLang("rust"), get_lib_output_style(link_strategy, preferred_linkage, pic_behavior))] - link, meta = param_artifact[params] - strategy_info[link_strategy] = _handle_rust_artifact(ctx, compile_ctx.dep_ctx, params.crate_type, link_strategy, link, meta) + crate = attr_crate(ctx) - merged_link_info, shared_libs, inherited_graphs, inherited_link_deps = _rust_link_providers(ctx, compile_ctx.dep_ctx, compile_ctx.cxx_toolchain_info, link_infos, Linkage(ctx.attrs.preferred_linkage)) + merged_link_infos, shared_libs, linkable_graphs, exported_link_deps = _rust_link_providers(ctx, compile_ctx.dep_ctx) # Create rust library provider. 
rust_link_info = RustLinkInfo( crate = crate, - strategies = strategy_info, - merged_link_info = merged_link_info, - exported_link_deps = inherited_link_deps, + strategies = rust_artifacts, + merged_link_infos = merged_link_infos, + exported_link_deps = exported_link_deps, shared_libs = shared_libs, - linkable_graphs = inherited_graphs, + linkable_graphs = linkable_graphs, ) - return rust_link_info + providers.append(rust_link_info) + providers += _native_link_providers(ctx, compile_ctx, lang_style_param, native_param_artifact, link_infos, rust_link_info) + return providers -def _link_infos( +def _rust_link_providers( ctx: AnalysisContext, - compile_ctx: CompileContext, - lang_style_param: dict[(LinkageLang, LibOutputStyle), BuildParams], - param_artifact: dict[BuildParams, RustcOutput]) -> dict[LibOutputStyle, LinkInfos]: - if ctx.attrs.proc_macro: - # Don't need any of this for proc macros - return {} - - advanced_unstable_linking = compile_ctx.toolchain_info.advanced_unstable_linking - lang = LinkageLang("native-unbundled") if advanced_unstable_linking else LinkageLang("native") - linker_type = compile_ctx.cxx_toolchain_info.linker_info.type + dep_ctx: DepCollectionContext) -> ( + dict[ConfiguredTargetLabel, MergedLinkInfo], + SharedLibraryInfo, + list[LinkableGraph], + list[Dependency], +): + inherited_link_infos = inherited_merged_link_infos(ctx, dep_ctx) + inherited_shlibs = inherited_shared_libs(ctx, dep_ctx) + inherited_graphs = inherited_linkable_graphs(ctx, dep_ctx) + inherited_exported_deps = inherited_exported_link_deps(ctx, dep_ctx) - link_infos = {} - for output_style in LibOutputStyle: - lib = param_artifact[lang_style_param[(lang, output_style)]] - external_debug_info = make_artifact_tset( - actions = ctx.actions, - label = ctx.label, - artifacts = filter(None, [lib.dwo_output_directory]), - children = lib.extra_external_debug_info, - ) - if output_style == LibOutputStyle("shared_lib"): - link_infos[output_style] = LinkInfos( - default = LinkInfo( - linkables = [SharedLibLinkable(lib = lib.output)], - external_debug_info = external_debug_info, - ), - stripped = LinkInfo( - linkables = [SharedLibLinkable(lib = lib.stripped_output)], - external_debug_info = external_debug_info, - ), - ) - else: - link_infos[output_style] = LinkInfos( - default = LinkInfo( - linkables = [ArchiveLinkable( - archive = Archive(artifact = lib.output), - linker_type = linker_type, - )], - external_debug_info = external_debug_info, - ), - stripped = LinkInfo( - linkables = [ArchiveLinkable( - archive = Archive(artifact = lib.stripped_output), - linker_type = linker_type, - )], - ), - ) - return link_infos + shared_libs = merge_shared_libraries( + ctx.actions, + deps = inherited_shlibs, + ) + return (inherited_link_infos, shared_libs, inherited_graphs, inherited_exported_deps) -def _native_providers( +def _native_link_providers( ctx: AnalysisContext, compile_ctx: CompileContext, lang_style_param: dict[(LinkageLang, LibOutputStyle), BuildParams], @@ -737,36 +812,15 @@ def _native_providers( (ie C/C++) code, along with relevant dependencies. """ - if ctx.attrs.proc_macro: - # Proc-macros never have a native form - return [] - - # If advanced_unstable_linking is set on the the rust toolchain, then build this artifact - # using the "native-unbundled" linkage language. 
See LinkageLang docs for more details - advanced_unstable_linking = compile_ctx.toolchain_info.advanced_unstable_linking - lang = LinkageLang("native-unbundled") if advanced_unstable_linking else LinkageLang("native") - - if advanced_unstable_linking: - # The rust link providers already contain the linkables for the `archive` and `pic_archive` - # cases - link_infos = { - LibOutputStyle("shared_lib"): link_infos[LibOutputStyle("shared_lib")], - LibOutputStyle("archive"): LinkInfos(default = LinkInfo()), - LibOutputStyle("pic_archive"): LinkInfos(default = LinkInfo()), - } - # We collected transitive deps in the Rust link providers - inherited_link_infos = [rust_link_info.merged_link_info] + inherited_link_infos = rust_link_info.merged_link_infos inherited_shlibs = [rust_link_info.shared_libs] inherited_link_graphs = rust_link_info.linkable_graphs inherited_exported_deps = rust_link_info.exported_link_deps - linker_info = compile_ctx.cxx_toolchain_info.linker_info - linker_type = linker_info.type - providers = [] - shared_lib_params = lang_style_param[(lang, LibOutputStyle("shared_lib"))] + shared_lib_params = lang_style_param[(LinkageLang("native"), LibOutputStyle("shared_lib"))] shared_lib_output = param_artifact[shared_lib_params].output preferred_linkage = Linkage(ctx.attrs.preferred_linkage) @@ -776,7 +830,7 @@ def _native_providers( ctx, compile_ctx.cxx_toolchain_info.pic_behavior, link_infos, - deps = inherited_link_infos, + deps = inherited_link_infos.values(), exported_deps = filter(None, [d.get(MergedLinkInfo) for d in inherited_exported_deps]), preferred_linkage = preferred_linkage, )) @@ -784,7 +838,7 @@ def _native_providers( solibs = {} # Add the shared library to the list of shared libs. - shlib_name = get_default_shared_library_name(linker_info, ctx.label) + shlib_name = attr_soname(ctx) # Only add a shared library if we generated one. # TODO(cjhopman): This is strange. Normally (like in c++) the link_infos passed to create_merged_link_info above would only have @@ -798,26 +852,19 @@ def _native_providers( ) # Native shared library provider. + shared_libs = create_shared_libraries(ctx, solibs) providers.append(merge_shared_libraries( ctx.actions, - create_shared_libraries(ctx, solibs), + shared_libs, inherited_shlibs, )) # Omnibus root provider. 
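     # (Roughly: the linkable root advertises this library as a candidate root
     # for omnibus-style native linking; its link info is the pic archive from
     # below, linked whole so no symbols are dropped.)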
linkable_root = create_linkable_root( + label = ctx.label, name = shlib_name, link_infos = LinkInfos( - default = LinkInfo( - linkables = [ArchiveLinkable( - archive = Archive( - artifact = shared_lib_output, - ), - linker_type = linker_type, - link_whole = True, - )], - external_debug_info = link_infos[LibOutputStyle("pic_archive")].default.external_debug_info, - ), + default = set_link_info_link_whole(link_infos[LibOutputStyle("pic_archive")].default), ), deps = inherited_link_graphs, ) @@ -837,7 +884,7 @@ def _native_providers( deps = inherited_link_graphs, exported_deps = inherited_exported_deps, link_infos = link_infos, - shared_libs = solibs, + shared_libs = shared_libs, default_soname = shlib_name, include_in_android_mergemap = False, ), @@ -852,6 +899,19 @@ def _native_providers( # auto link groups anyway providers.append(merge_link_group_lib_info(children = inherited_link_group_lib_infos(ctx, compile_ctx.dep_ctx))) + providers.append( + create_unix_env_info( + actions = ctx.actions, + env = UnixEnv( + label = ctx.label, + native_libs = [shared_libs], + ), + #deps = [dep.dep for dep in resolve_deps(ctx, compile_ctx.dep_ctx)] + #deps = deps, + deps = inherited_exported_deps, + ), + ) + return providers # Compute transitive deps. Caller decides whether this is necessary. @@ -859,13 +919,12 @@ def _compute_transitive_deps( ctx: AnalysisContext, dep_ctx: DepCollectionContext, dep_link_strategy: LinkStrategy) -> ( - dict[Artifact, CrateName], - dict[Artifact, CrateName], + dict[MetadataKind, dict[Artifact, CrateName]], list[ArtifactTSet], dict[RustProcMacroMarker, ()], ): - transitive_deps = {} - transitive_rmeta_deps = {} + toolchain_info = ctx.attrs._rust_toolchain[RustToolchainInfo] + transitive_deps = {m: {} for m in MetadataKind} external_debug_info = [] transitive_proc_macro_deps = {} @@ -875,18 +934,16 @@ def _compute_transitive_deps( # We don't want to propagate proc macros directly, and they have no transitive deps continue - strategy = strategy_info(dep.info, dep_link_strategy) - transitive_deps[strategy.rlib] = dep.info.crate - transitive_deps.update(strategy.transitive_deps) - - transitive_rmeta_deps[strategy.rmeta] = dep.info.crate - transitive_rmeta_deps.update(strategy.transitive_rmeta_deps) + strategy = strategy_info(toolchain_info, dep.info, dep_link_strategy) + for m in MetadataKind: + transitive_deps[m][strategy.outputs[m]] = dep.info.crate + transitive_deps[m].update(strategy.transitive_deps[m]) external_debug_info.append(strategy.external_debug_info) transitive_proc_macro_deps.update(strategy.transitive_proc_macro_deps) - return transitive_deps, transitive_rmeta_deps, external_debug_info, transitive_proc_macro_deps + return transitive_deps, external_debug_info, transitive_proc_macro_deps def rust_library_macro_wrapper(rust_library: typing.Callable) -> typing.Callable: def wrapper(**kwargs): diff --git a/prelude/rust/rust_toolchain.bzl b/prelude/rust/rust_toolchain.bzl index 9e2a41955fa..a69e40d69ae 100644 --- a/prelude/rust/rust_toolchain.bzl +++ b/prelude/rust/rust_toolchain.bzl @@ -40,33 +40,26 @@ rust_toolchain_attrs = { "rustc_target_triple": provider_field(str | None, default = None), # Baseline compiler config "rustc_flags": provider_field(list[typing.Any], default = []), + # Rustc flags, except that they are applied on the command line after the + # target's rustc flags + "extra_rustc_flags": provider_field(list[typing.Any], default = []), + # Flags applied only on check builds + "rustc_check_flags": provider_field(list[typing.Any], default = []), # Extra 
flags when building binaries
     "rustc_binary_flags": provider_field(list[typing.Any], default = []),
-    # Extra flags for doing check builds
-    "rustc_check_flags": provider_field(list[typing.Any], default = []),
     # Extra flags for building tests
     "rustc_test_flags": provider_field(list[typing.Any], default = []),
     # Extra flags when coverage is enabled for a target
     # FIXME(JakobDegen): Can't use `list[str]` here, because then the default is wrong, but can't
     # use a non-empty list as the default because lists are mutable
     "rustc_coverage_flags": provider_field(typing.Any, default = ("-Cinstrument-coverage",)),
+    # Extra env variables that should be made available to the rustdoc executable.
+    "rustdoc_env": provider_field(dict[str, typing.Any], default = {}),
     # Extra flags for rustdoc invocations
     "rustdoc_flags": provider_field(list[typing.Any], default = []),
-    # Use rmeta for lib->lib dependencies, and only block
-    # linking on rlib crates. The hope is that rmeta builds
-    # are quick and this increases effective parallelism.
-    "pipelined": provider_field(bool, default = False),
     # When you `buck test` a library, also compile and run example code in its
     # documentation comments.
     "doctests": provider_field(bool, default = False),
-    # Filter out failures when we just need diagnostics. That is,
-    # a rule which fails with a compilation failure will report
-    # success as an RE action, but a "failure filter" action will
-    # report the failure if some downstream action needs one of the
-    # artifacts. If all you need is diagnostics, then it will report
-    # success. This doubles the number of actions, so it should only
-    # be explicitly enabled when needed.
-    "failure_filter": provider_field(bool, default = False),
     # The Rust compiler (rustc)
     "compiler": provider_field(RunInfo | None, default = None),
     # Rust documentation extractor (rustdoc)
@@ -79,6 +72,12 @@ rust_toolchain_attrs = {
     "rustdoc_test_with_resources": provider_field(RunInfo | None, default = None),
     # Wrapper for rustdoc coverage
     "rustdoc_coverage": provider_field(RunInfo | None, default = None),
+    # These two scripts are used to implement deferred linking, where the link action
+    # is separate from the rustc invocation action. The benefit here is that we can
+    # decouple the action graph such that rustc can compile libs without waiting for
+    # the link step of shared lib dependencies to complete.
+    "deferred_link_action": provider_field(RunInfo | None, default = None),
+    "extract_link_action": provider_field(RunInfo | None, default = None),
     # Failure filter action
     "failure_filter_action": provider_field(RunInfo | None, default = None),
     # The default edition to use, if not specified.
@@ -87,11 +86,23 @@ rust_toolchain_attrs = {
     "allow_lints": provider_field(list[typing.Any], default = []),
     "deny_lints": provider_field(list[typing.Any], default = []),
     "warn_lints": provider_field(list[typing.Any], default = []),
+    # Deny-on-Check lints are handled differently depending on the build.
+    #
+    # For check builds, e.g. [check], [diag.json], [clippy.json] subtargets, or the default target
+    # for `rust_library` rules, these lints will be applied as Deny Lints. Importantly, this means
+    # that when you call `buck build :rust_lib` or use tools like arc rust-check or rustfix, these
+    # lints will be surfaced as errors.
+    #
+    # However, for "regular" builds, e.g. when building tests or binaries, or building this target
+    # as a dependency of another target, these flags will be surfaced only as warnings. The primary
+    # benefit here is that you can develop + test your code as normal and will not be blocked by
+    # these lints. However, once you run rust check, or submit your code to phabricator, these
+    # lints will prevent you from landing your code. This way we can introduce lints that we'd like
+    # to deny from our codebase without slowing down your inner dev loop, or encouraging you to
+    # pass --cap-lints=warn for your projects.
+    "deny_on_check_lints": provider_field(list[typing.Any], default = []),
     # Clippy configuration file clippy.toml
     "clippy_toml": provider_field(Artifact | None, default = None),
-    # URL prefix (e.g. /path/to/docs) where crates' docs are hosted. Used for
-    # linking types in signatures to their definition in another crate.
-    "extern_html_root_url_prefix": provider_field(str | None, default = None),
     # Utilities used for building flagfiles containing dynamic crate names
     "transitive_dependency_symlinks_tool": provider_field(RunInfo | None, default = None),
     # Setting this enables additional behaviors that improve linking at the
@@ -102,6 +113,10 @@ rust_toolchain_attrs = {
     # FIXME(JakobDegen): This should require `explicit_sysroot_deps` in the
     # future.
     "advanced_unstable_linking": provider_field(bool, default = False),
+    # Override the implicit sysroot with the provided Artifact, a directory
+    # containing a prebuilt sysroot. Will be forwarded to rustc as
+    # `--sysroot=<path>`. Only one of this and `explicit_sysroot_deps` may be set.
+    "sysroot_path": provider_field(Artifact | None, default = None),
     # See the documentation on the type for details
     "explicit_sysroot_deps": provider_field(RustExplicitSysrootDeps | None, default = None),
     # The panic runtime to use. This is a part of the target definition and is
@@ -122,6 +137,9 @@ rust_toolchain_attrs = {
     #
     # FIXME(JakobDegen): Fix `enum` so that we can set `unwind` as the default
     "panic_runtime": provider_field(PanicRuntime),
+    # Setting this allows Rust rules to use features which are only available
+    # on nightly releases.
+ "nightly_features": provider_field(bool, default = False), } RustToolchainInfo = provider(fields = rust_toolchain_attrs) diff --git a/prelude/rust/tools/BUCK.v2 b/prelude/rust/tools/BUCK.v2 index 63fb446d864..03154a325d8 100644 --- a/prelude/rust/tools/BUCK.v2 +++ b/prelude/rust/tools/BUCK.v2 @@ -1,4 +1,13 @@ -load(":tool_rules.bzl", "get_rustc_cfg") +load("@prelude//utils:source_listing.bzl", "source_listing") +load( + ":tool_rules.bzl", + "get_rustc_cfg", + "linkable_symbol_supports_no_std", +) + +oncall("build_infra") + +source_listing() prelude = native @@ -7,6 +16,18 @@ get_rustc_cfg( visibility = ["PUBLIC"], ) +prelude.python_bootstrap_binary( + name = "deferred_link_action", + main = "deferred_link_action.py", + visibility = ["PUBLIC"], +) + +prelude.python_bootstrap_binary( + name = "extract_link_action", + main = "extract_link_action.py", + visibility = ["PUBLIC"], +) + prelude.python_bootstrap_binary( name = "rustc_action", main = "rustc_action.py", @@ -42,3 +63,13 @@ prelude.python_bootstrap_binary( main = "rustdoc_coverage.py", visibility = ["PUBLIC"], ) + +prelude.export_file( + name = "linkable_symbol.rs", + visibility = ["PUBLIC"], +) + +linkable_symbol_supports_no_std( + name = "linkable_symbol_supports_no_std", + visibility = ["PUBLIC"], +) diff --git a/prelude/rust/tools/attrs.bzl b/prelude/rust/tools/attrs.bzl index 5e181941a1f..2a18fc0c367 100644 --- a/prelude/rust/tools/attrs.bzl +++ b/prelude/rust/tools/attrs.bzl @@ -12,6 +12,8 @@ def _internal_tool(default: str) -> Attr: # configurable attributes there. This list of internal tools is distracting and # expected to grow. internal_tool_attrs = { + "deferred_link_action": _internal_tool("prelude//rust/tools:deferred_link_action"), + "extract_link_action": _internal_tool("prelude//rust/tools:extract_link_action"), "failure_filter_action": _internal_tool("prelude//rust/tools:failure_filter_action"), "rustc_action": _internal_tool("prelude//rust/tools:rustc_action"), "rustdoc_coverage": _internal_tool("prelude//rust/tools:rustdoc_coverage"), diff --git a/prelude/rust/tools/deferred_link_action.py b/prelude/rust/tools/deferred_link_action.py new file mode 100644 index 00000000000..87255d847aa --- /dev/null +++ b/prelude/rust/tools/deferred_link_action.py @@ -0,0 +1,94 @@ +#!/usr/bin/env python3 +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +# Execute a previously deferred link action. The inputs to this script are expected to come from +# a previous invocation of `extract_link_action.py`. The main special processing here is to handle +# the optional version script argument, and pass the objects located in the provided directory +# as individual inputs to the linker command. 
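+# As an illustrative sketch (all paths hypothetical), an invocation looks like:
+#
+#   deferred_link_action.py --objects objs/ --version-script vs.txt \
+#       clang++ -o libfoo.so <remaining args>
+#
+# The first `linker` argument is the linker itself; the version script (when
+# non-empty), the .o files found under --objects, and the remaining arguments
+# are all written to a temporary argsfile that is passed to the linker as
+# `@argsfile`.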
+ +import argparse +import asyncio +import os +import sys +import tempfile +from pathlib import Path +from typing import Any, List, NamedTuple + + +def eprint(*args: Any, **kwargs: Any) -> None: + print(*args, end="\n", file=sys.stderr, flush=True, **kwargs) + + +class Args(NamedTuple): + objects: Path + version_script: Path + linker: List[str] + + +def arg_parse() -> Args: + parser = argparse.ArgumentParser() + parser.add_argument( + "--objects", + type=Path, + required=True, + ) + parser.add_argument( + "--version-script", + type=Path, + required=True, + ) + parser.add_argument( + "linker", + nargs=argparse.REMAINDER, + type=str, + help="Linker command line", + ) + + return Args(**vars(parser.parse_args())) + + +def unpack_objects(objects: Path) -> List[str]: + return [os.path.join(objects, x) for x in os.listdir(objects) if x.endswith(".o")] + + +async def main() -> int: + args = arg_parse() + + linker_cmd = args.linker[:1] + + objects = unpack_objects(args.objects) + + with tempfile.NamedTemporaryFile( + mode="wb", + prefix="real-linker-args-", + suffix=".txt", + delete=False, + ) as args_file: + # Some platforms do not use version-scripts. For those platforms we simply + # do not pass the version-script to the linker. + if os.path.getsize(args.version_script) > 0: + args_file.write( + b"-Wl,--version-script=" + str(args.version_script).encode() + b"\n" + ) + + args_file.write("\n".join(objects).encode() + b"\n") + args_file.write("\n".join(args.linker[1:]).encode() + b"\n") + args_file.flush() + + proc = await asyncio.create_subprocess_exec( + *linker_cmd, + "@" + args_file.name, + env=os.environ, + limit=1_000_000, + ) + res = await proc.wait() + + return res + + +sys.exit(asyncio.run(main())) diff --git a/prelude/rust/tools/extract_link_action.py b/prelude/rust/tools/extract_link_action.py new file mode 100644 index 00000000000..f101c6f509c --- /dev/null +++ b/prelude/rust/tools/extract_link_action.py @@ -0,0 +1,134 @@ +#!/usr/bin/env python3 +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +# A "fake" linker command meant to be provided to rustc as `-Clinker={}`. This script will process +# the arguments passed in from rustc and export the objects, version script, and other arguments +# as outputs to later be used by an invocation of `deferred_link_action.py`. +# +# Some arguments here are stripped out e.g. -L in order to save work from having to persist +# an artifact between this action and the deferred link action. See the comments in +# `process_link_args()` for more details. 
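+# As a rough sketch of the overall flow (the exact rustc wiring is assumed, not
+# shown in this file): rustc is invoked with `-Clinker=<wrapper>` where the
+# wrapper runs this script with the `--out_*` flags baked in; instead of
+# producing a linked binary, the action records the filtered link line, the
+# version script, and the object files as outputs for `deferred_link_action.py`.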
+ +import argparse +import os +import shutil +import sys +from pathlib import Path +from typing import Any, IO, List, NamedTuple, Tuple + + +def eprint(*args: Any, **kwargs: Any) -> None: + print(*args, end="\n", file=sys.stderr, flush=True, **kwargs) + + +class Args(NamedTuple): + out_argsfile: IO[str] + out_version_script: Path + out_objects: Path + linker: List[str] + + +def arg_parse() -> Args: + parser = argparse.ArgumentParser() + parser.add_argument( + "--out_argsfile", + type=argparse.FileType("w"), + required=True, + ) + parser.add_argument( + "--out_version-script", + type=Path, + required=True, + ) + parser.add_argument( + "--out_objects", + type=Path, + required=True, + ) + parser.add_argument( + "linker", + nargs=argparse.REMAINDER, + type=str, + help="Linker command line", + ) + + return Args(**vars(parser.parse_args())) + + +def process_link_args(args: List[str]) -> Tuple[List[str], Path | None, List[Path]]: + new_args = [] + version_script = None + objects = [] + + i = 0 + size = len(args) + while i < size: + arg = args[i] + # We want to extract the version script file as an artifact to pass along to the deferred + # link action. rustc by default exports this file to somewhere in the TMP directory, so we + # must persist it ourselves between actions via an artifact. + if arg.startswith("-Wl,--version-script"): + version_script = Path(arg.split("=")[1]) + i += 1 + continue + # These are the artifacts that rustc generates as inputs to the linker. + elif arg.endswith("rcgu.o") or arg.endswith("symbols.o"): + objects.append(Path(arg)) + i += 1 + continue + # We don't need either of these, and omitting them from the deferred link args will save + # us from having to pass them to the deferred link action. + # The .rlib files here are hollow rlibs, providing only metadata for each dependency. These + # files have no impact on the link step; they're only needed by rustc. + # The .rmeta file contains the metadata section for this crate being linked. Again, since + # rmeta is not used at all for linking, we can omit the section entirely from our link step. + elif arg.endswith(".rlib") or arg.endswith(".rmeta"): + i += 1 + continue + # The -L flag is used by rustc to pass the sysroot as a linker search path. When compiling + # we pass a dummy empty sysroot to rustc, so this path is not needed. The real -L flags for + # transitive deps are passed along in a separate args file. + # The -o flag here is set by rustc to a temporary output location. In a normal rustc link, + # rustc will eventually copy the temporary output file to the final location specified by + # --emit=link={}. Since this path is temporary, we can simply omit it and pass the real + # path needed by buck directly to the deferred link action. 
+        elif arg.startswith("-L") or arg.startswith("-o"):
+            i += 2  # also skip the flag's separate value
+            continue
+
+        new_args.append(arg)
+        i += 1
+
+    return (new_args, version_script, objects)
+
+
+def unpack_objects(objects: Path) -> List[str]:
+    return [x for x in os.listdir(objects) if x.endswith(".o") or x.endswith(".rmeta")]
+
+
+def main() -> int:
+    args = arg_parse()
+
+    filtered_args, version_script, objects = process_link_args(args.linker[1:])
+    args.out_argsfile.write("\n".join(filtered_args))
+    args.out_argsfile.close()
+
+    if version_script:
+        shutil.copy(version_script, args.out_version_script)
+    else:
+        # Touch the file to make buck2 happy
+        args.out_version_script.touch()
+
+    os.mkdir(args.out_objects)
+    for obj in objects:
+        shutil.copy(obj, args.out_objects)
+
+    return 0
+
+
+sys.exit(main())
diff --git a/prelude/rust/tools/linkable_symbol.rs b/prelude/rust/tools/linkable_symbol.rs
new file mode 100644
index 00000000000..861c8334cde
--- /dev/null
+++ b/prelude/rust/tools/linkable_symbol.rs
@@ -0,0 +1,49 @@
+/*
+ * Copyright (c) Meta Platforms, Inc. and affiliates.
+ *
+ * This source code is licensed under both the MIT license found in the
+ * LICENSE-MIT file in the root directory of this source tree and the Apache
+ * License, Version 2.0 found in the LICENSE-APACHE file in the root directory
+ * of this source tree.
+ */
+
+#![cfg_attr(set_nostd, no_std)]
+
+#[cfg(any(rust_linkable_symbol_content_bytes, rust_linkable_symbol_getter_bytes))]
+#[repr(C)]
+#[cfg_attr(rust_linkable_symbol_align_bytes = "2", repr(align(2)))]
+#[cfg_attr(rust_linkable_symbol_align_bytes = "4", repr(align(4)))]
+#[cfg_attr(rust_linkable_symbol_align_bytes = "8", repr(align(8)))]
+struct Aligned<Bytes: ?Sized> {
+    bytes: Bytes,
+}
+
+#[cfg(rust_linkable_symbol_content_str)]
+#[used]
+#[export_name = env!("LINKABLE_SYMBOL")]
+pub static LINKABLE_SYMBOL: &str = include_str!("content");
+
+#[cfg(rust_linkable_symbol_content_bytes)]
+#[used]
+#[export_name = env!("LINKABLE_SYMBOL")]
+pub static LINKABLE_SYMBOL: &Aligned<[u8]> = &Aligned {
+    bytes: *include_bytes!("content"),
+};
+
+#[cfg(rust_linkable_symbol_getter_str)]
+pub fn get() -> &'static str {
+    extern "Rust" {
+        #[link_name = env!("LINKABLE_SYMBOL")]
+        static LINKABLE_SYMBOL: &'static str;
+    }
+    unsafe { LINKABLE_SYMBOL }
+}
+
+#[cfg(rust_linkable_symbol_getter_bytes)]
+pub fn get() -> &'static [u8] {
+    extern "Rust" {
+        #[link_name = env!("LINKABLE_SYMBOL")]
+        static LINKABLE_SYMBOL: &'static Aligned<[u8]>;
+    }
+    unsafe { &LINKABLE_SYMBOL.bytes }
+}
diff --git a/prelude/rust/tools/rustc_action.py b/prelude/rust/tools/rustc_action.py
index 3f942047427..d15971bf969 100755
--- a/prelude/rust/tools/rustc_action.py
+++ b/prelude/rust/tools/rustc_action.py
@@ -42,6 +42,7 @@ def eprint(*args: Any, **kwargs: Any) -> None:
 
 def key_value_arg(s: str) -> Tuple[str, str]:
+    s = arg_eval(s)
     key_value = s.split("=", maxsplit=1)
     if len(key_value) == 2:
         return (key_value[0], key_value[1])
@@ -58,6 +59,7 @@ class Args(NamedTuple):
     buck_target: Optional[str]
     failure_filter: Optional[IO[bytes]]
     required_output: Optional[List[Tuple[str, str]]]
+    echo: Optional[IO[bytes]]
     rustc: List[str]
 
 
@@ -118,10 +120,15 @@ def arg_parse() -> Args:
         help="Required output path we expect rustc to generate "
         "(and filled with a placeholder on a filtered failure)",
     )
+    parser.add_argument(
+        "--echo",
+        type=argparse.FileType("wb"),
+        help="Write the input command line to this file, without running it",
+    )
     parser.add_argument(
         "rustc",
         nargs=argparse.REMAINDER,
-        type=str,
+        type=arg_eval,
         help="Compiler 
command line", ) @@ -230,6 +237,10 @@ async def handle_output( # noqa: C901 async def main() -> int: args = arg_parse() + if args.echo: + args.echo.write("".join(arg + "\n" for arg in args.rustc).encode("utf-8")) + return 0 + # Inherit a very limited initial environment, then add the new things env = { k: os.environ[k] @@ -244,6 +255,7 @@ async def main() -> int: "LOCALAPPDATA", "PROGRAMDATA", "TEMP", + "TMP", # TODO(andirauter): Required by RE. Remove them when no longer required T119466023 "EXECUTION_ID", "SESSION_ID", @@ -264,7 +276,6 @@ async def main() -> int: ] if k in os.environ } - nix_env(env) if args.env: # Unescape previously escaped newlines. # Example: \\\\n\\n -> \\\n\n -> \\n\n @@ -272,15 +283,14 @@ async def main() -> int: {k: v.replace("\\n", "\n").replace("\\\n", "\\n") for k, v in args.env} ) if args.path_env: - env.update({k: str(Path(v).resolve()) for k, v in args.path_env}) + env.update({k: os.path.abspath(v) for k, v in args.path_env}) crate_map = dict(args.crate_map) if args.crate_map else {} if DEBUG: print(f"args {repr(args)} env {env} crate_map {crate_map}", end="\n") - rustc_cmd = args.rustc[:1] - rustc_args = [arg_eval(arg) for arg in args.rustc[1:]] + rustc_cmd, rustc_args = args.rustc[:1], args.rustc[1:] if args.remap_cwd_prefix is not None: rustc_args.append( @@ -362,39 +372,5 @@ async def main() -> int: return res -NIX_ENV_VARS = [ - "NIX_BINTOOLS", - "NIX_BINTOOLS_FOR_TARGET", - "NIX_CC", - "NIX_CC_FOR_TARGET", - "NIX_CFLAGS_COMPILE", - "NIX_CFLAGS_COMPILE_FOR_TARGET", - "NIX_COREFOUNDATION_RPATH", - "NIX_DONT_SET_RPATH", - "NIX_DONT_SET_RPATH_FOR_BUILD", - "NIX_ENFORCE_NO_NATIVE", - "NIX_HARDENING_ENABLE", - "NIX_IGNORE_LD_THROUGH_GCC", - "NIX_LDFLAGS", - "NIX_LDFLAGS_FOR_TARGET", - "NIX_NO_SELF_RPATH", -] -NIX_ENV_VAR_PREFIXES = [ - "NIX_BINTOOLS_WRAPPER_TARGET_HOST_", - "NIX_BINTOOLS_WRAPPER_TARGET_TARGET_", - "NIX_CC_WRAPPER_TARGET_HOST_", - "NIX_CC_WRAPPER_TARGET_TARGET_", -] - - -def nix_env(env: Dict[str, str]): - env.update({k: os.environ[k] for k in NIX_ENV_VARS if k in os.environ}) - for prefix in NIX_ENV_VAR_PREFIXES: - vars_starting_with = dict( - filter(lambda pair: pair[0].startswith(prefix), - os.environ.items())) - env.update({k: v for k, v in vars_starting_with.items()}) - -# There is a bug with asyncio.run() on Windows: -# https://bugs.python.org/issue39232 -sys.exit(asyncio.new_event_loop().run_until_complete(main())) + +sys.exit(asyncio.run(main())) diff --git a/prelude/rust/tools/tool_rules.bzl b/prelude/rust/tools/tool_rules.bzl index 48c507e6978..2184cd04c01 100644 --- a/prelude/rust/tools/tool_rules.bzl +++ b/prelude/rust/tools/tool_rules.bzl @@ -30,3 +30,20 @@ get_rustc_cfg = rule( "_rust_toolchain": toolchains_common.rust(), }, ) + +def _linkable_symbol_supports_no_std_impl(ctx: AnalysisContext) -> list[Provider]: + toolchain_info = ctx.attrs._rust_toolchain[RustToolchainInfo] + + # `#[no_std]` requires use of `advanced_unstable_linking` on the toolchain, + # as otherwise the panic handler is missing. 
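+    # (For illustration: when advanced_unstable_linking is set, the flagfile
+    # written below contains "--cfg=set_nostd", which linkable_symbol.rs
+    # consumes via `#![cfg_attr(set_nostd, no_std)]`; otherwise it is empty.)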
+ cfg = "--cfg=set_nostd\n" if toolchain_info.advanced_unstable_linking else "" + + flagfile = ctx.actions.write("cfg", cfg) + return [DefaultInfo(default_output = flagfile)] + +linkable_symbol_supports_no_std = rule( + impl = _linkable_symbol_supports_no_std_impl, + attrs = { + "_rust_toolchain": toolchains_common.rust(), + }, +) diff --git a/prelude/sh_binary.bzl b/prelude/sh_binary.bzl index 679c813a45c..c54dcd5c07b 100644 --- a/prelude/sh_binary.bzl +++ b/prelude/sh_binary.bzl @@ -50,18 +50,9 @@ def _generate_script( # construct links directly to things (which buck1 actually also did for its # BUCK_DEFAULT_RUNTIME_RESOURCES). if not is_windows: - script_content = cmd_args([ + script_content = cmd_args( "#!/usr/bin/env bash", "set -e", - # This is awkward for two reasons: args doesn't support format strings - # and will insert a newline between items and so __RESOURCES_ROOT - # is put in a bash array, and we want it to be relative to script's - # dir, not the script itself, but there's no way to do that in - # starlark. To deal with this, we strip the first 3 characters - # (`../`). - "__RESOURCES_ROOT=(", - resources_dir, - ")", # If we access this sh_binary via a unhashed symlink we need to # update the relative source. '__SRC="${BASH_SOURCE[0]}"', @@ -72,7 +63,7 @@ def _generate_script( # identify what the right format is. For now, this variable lets # callees disambiguate (see D28960177 for more context). "export BUCK_SH_BINARY_VERSION_UNSTABLE=2", - "export BUCK_PROJECT_ROOT=$__SCRIPT_DIR/\"${__RESOURCES_ROOT:3}\"", + cmd_args("export BUCK_PROJECT_ROOT=\"$__SCRIPT_DIR/", resources_dir, "\"", delimiter = ""), # In buck1, the paths for resources that are outputs of rules have # different paths in BUCK_PROJECT_ROOT and # BUCK_DEFAULT_RUNTIME_RESOURCES, but we use the same paths. buck1's @@ -82,13 +73,12 @@ def _generate_script( # sources, the paths are the same for both. "export BUCK_DEFAULT_RUNTIME_RESOURCES=\"$BUCK_PROJECT_ROOT\"", "exec \"$BUCK_PROJECT_ROOT/{}\" \"$@\"".format(main_link), - ]).relative_to(script) + relative_to = (script, 1), + ) else: - script_content = cmd_args([ + script_content = cmd_args( "@echo off", "setlocal EnableDelayedExpansion", - "set __RESOURCES_ROOT=^", - resources_dir, # Fully qualified script path. "set __SRC=%~f0", # This is essentially a realpath. @@ -96,11 +86,11 @@ def _generate_script( # Get parent folder. 'for %%a in ("%__SRC%") do set "__SCRIPT_DIR=%%~dpa"', "set BUCK_SH_BINARY_VERSION_UNSTABLE=2", - # ':~3' strips the first 3 chars of __RESOURCES_ROOT. - "set BUCK_PROJECT_ROOT=%__SCRIPT_DIR%\\!__RESOURCES_ROOT:~3!", + cmd_args("set BUCK_PROJECT_ROOT=%__SCRIPT_DIR%\\", resources_dir, delimiter = ""), "set BUCK_DEFAULT_RUNTIME_RESOURCES=%BUCK_PROJECT_ROOT%", "%BUCK_PROJECT_ROOT%\\{} %*".format(main_link), - ]).relative_to(script) + relative_to = (script, 1), + ) actions.write( script, script_content, @@ -128,11 +118,13 @@ def sh_binary_impl(ctx): is_windows, ) + script = script.with_associated_artifacts([resources_dir]) + return [ DefaultInfo(default_output = script, other_outputs = [resources_dir]), RunInfo( # TODO(cjhopman): Figure out if we need to specify the link targets # as inputs. We shouldn't need to, but need to verify it. 
- args = cmd_args(script).hidden(resources_dir), + args = cmd_args(script, hidden = resources_dir), ), ] diff --git a/prelude/sh_test.bzl b/prelude/sh_test.bzl index d51eeed8ed5..9bb08fabdd0 100644 --- a/prelude/sh_test.bzl +++ b/prelude/sh_test.bzl @@ -14,22 +14,24 @@ def sh_test_impl(ctx: AnalysisContext) -> list[Provider]: if ctx.attrs.list_args or ctx.attrs.list_env or ctx.attrs.run_args or ctx.attrs.run_env: fail("An unsupported attribute was passed") - args = cmd_args() + args_args = [] + args_hidden = [] if ctx.attrs.test != None: if type(ctx.attrs.test) == "artifact": - args.add(ctx.attrs.test) + args_args.append(ctx.attrs.test) elif isinstance(ctx.attrs.test, Dependency): run_info = ctx.attrs.test.get(RunInfo) if run_info != None: - args.add(run_info.args) + args_args.append(run_info.args) else: info = ctx.attrs.test[DefaultInfo] - args.add(info.default_outputs).hidden(info.other_outputs) + args_args.append(info.default_outputs) + args_hidden.append(info.other_outputs) else: fail("Unexpected type for test attribute") - args.hidden(ctx.attrs.resources) + args_hidden.append(ctx.attrs.resources) deps = [] for dep in ctx.attrs.deps: @@ -37,7 +39,9 @@ def sh_test_impl(ctx: AnalysisContext) -> list[Provider]: deps.extend(info.default_outputs) deps.extend(info.other_outputs) - args.hidden(deps) + args_hidden.append(deps) + + args = cmd_args(args_args, hidden = args_hidden) command = [args] + ctx.attrs.args @@ -45,7 +49,7 @@ def sh_test_impl(ctx: AnalysisContext) -> list[Provider]: re_executor, executor_overrides = get_re_executors_from_props(ctx) # We implicitly make the target run from the project root if remote - # excution options were specified + # execution options were specified run_from_project_root = "buck2_run_from_project_root" in (ctx.attrs.labels or []) or re_executor != None # TODO support default info and runinfo properly by writing a sh script that invokes the command properly diff --git a/prelude/test/inject_test_run_info.bzl b/prelude/test/inject_test_run_info.bzl index 932667a06fa..bba28d60fa0 100644 --- a/prelude/test/inject_test_run_info.bzl +++ b/prelude/test/inject_test_run_info.bzl @@ -5,11 +5,22 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +load( + "@prelude//tests:re_utils.bzl", + "maybe_add_run_as_bundle_label", +) + def inject_test_run_info(ctx: AnalysisContext, test_info: ExternalRunnerTestInfo) -> list[Provider]: # Access this here so we get failures in CI if we forget to inject it # anywhere, regardless of whether an `env` is used. inject_test_env = ctx.attrs._inject_test_env[RunInfo] + # `if test_info.labels != None` doesn't work because `None` is not of type `list[str]`, + # yet it is None in some cases... this hack lets us check for None without a type error. 
+ if getattr(test_info, "labels", None) != None: + # If forcing RE on tpx, check if the test suite should be run as a bundle + maybe_add_run_as_bundle_label(ctx, test_info.labels) + if (not test_info.env) or _exclude_test_env_from_run_info(ctx): return [test_info, RunInfo(args = test_info.command)] diff --git a/prelude/test/tools/BUCK.v2 b/prelude/test/tools/BUCK.v2 index 89ed9074211..1c3928706f2 100644 --- a/prelude/test/tools/BUCK.v2 +++ b/prelude/test/tools/BUCK.v2 @@ -1,3 +1,9 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + prelude = native prelude.python_bootstrap_binary( diff --git a/prelude/tests/re_utils.bzl b/prelude/tests/re_utils.bzl index 514396604ff..d5ef3661129 100644 --- a/prelude/tests/re_utils.bzl +++ b/prelude/tests/re_utils.bzl @@ -9,24 +9,44 @@ load("@prelude//:build_mode.bzl", "BuildModeInfo") load("@prelude//tests:remote_test_execution_toolchain.bzl", "RemoteTestExecutionToolchainInfo") load("@prelude//utils:expect.bzl", "expect_non_none") -def _get_re_arg(ctx: AnalysisContext): +ReArg = record( + re_props = field(dict | None), + default_run_as_bundle = field(bool | None), +) + +def _get_re_arg(ctx: AnalysisContext) -> ReArg: if not hasattr(ctx.attrs, "remote_execution"): - return None + return ReArg(re_props = None, default_run_as_bundle = False) if ctx.attrs.remote_execution != None: - # If this is a string, look up the profile on the RE toolchain. + # If this is a string, look up the re_props on the RE toolchain. if type(ctx.attrs.remote_execution) == type(""): expect_non_none(ctx.attrs._remote_test_execution_toolchain) - return ctx.attrs._remote_test_execution_toolchain[RemoteTestExecutionToolchainInfo].profiles[ctx.attrs.remote_execution] + return ReArg( + re_props = + ctx.attrs._remote_test_execution_toolchain[RemoteTestExecutionToolchainInfo].profiles[ctx.attrs.remote_execution], + default_run_as_bundle = + ctx.attrs._remote_test_execution_toolchain[RemoteTestExecutionToolchainInfo].default_run_as_bundle, + ) - return ctx.attrs.remote_execution + return ReArg(re_props = ctx.attrs.remote_execution, default_run_as_bundle = False) # Check for a default RE option on the toolchain. re_toolchain = ctx.attrs._remote_test_execution_toolchain if re_toolchain != None and re_toolchain[RemoteTestExecutionToolchainInfo].default_profile != None: - return re_toolchain[RemoteTestExecutionToolchainInfo].default_profile + return ReArg( + re_props = re_toolchain[RemoteTestExecutionToolchainInfo].default_profile, + default_run_as_bundle = re_toolchain[RemoteTestExecutionToolchainInfo].default_run_as_bundle, + ) - return None + return ReArg(re_props = None, default_run_as_bundle = False) + +def maybe_add_run_as_bundle_label(ctx: AnalysisContext, labels: list[str]) -> None: + if "re_ignore_force_run_as_bundle" in labels: + return + re_arg = _get_re_arg(ctx) + if re_arg.default_run_as_bundle or read_config("tpx", "force_run_as_bundle") == "True": + labels.extend(["run_as_bundle"]) def get_re_executors_from_props(ctx: AnalysisContext) -> ([CommandExecutorConfig, None], dict[str, CommandExecutorConfig]): """ @@ -35,9 +55,20 @@ def get_re_executors_from_props(ctx: AnalysisContext) -> ([CommandExecutorConfig Returns (default_executor, executor_overrides). 
""" - re_props = _get_re_arg(ctx) + re_props = _get_re_arg(ctx).re_props if re_props == None: - return None, {} + # If no RE args are set and an RE config is specified + if bool(read_config("tpx", "force_re_props")): + re_props = { + "capabilities": { + "platform": read_config("remoteexecution", "platform"), + "subplatform": read_config("remoteexecution", "subplatform"), + }, + "use_case": read_config("remoteexecution", "use_case"), + } + + else: + return None, {} re_props_copy = dict(re_props) capabilities = re_props_copy.pop("capabilities") @@ -45,6 +76,9 @@ def get_re_executors_from_props(ctx: AnalysisContext) -> ([CommandExecutorConfig listing_capabilities = re_props_copy.pop("listing_capabilities", None) remote_cache_enabled = re_props_copy.pop("remote_cache_enabled", None) re_dependencies = re_props_copy.pop("dependencies", []) + local_enabled = re_props_copy.pop("local_enabled", False) + local_listing_enabled = re_props_copy.pop("local_listing_enabled", False) + re_resource_units = re_props_copy.pop("resource_units", None) if re_props_copy: unexpected_props = ", ".join(re_props_copy.keys()) fail("found unexpected re props: " + unexpected_props) @@ -55,22 +89,24 @@ def get_re_executors_from_props(ctx: AnalysisContext) -> ([CommandExecutorConfig remote_execution_action_key = "{}={}".format(build_mode_info.cell, build_mode_info.mode) default_executor = CommandExecutorConfig( - local_enabled = False, + local_enabled = local_enabled, remote_enabled = True, remote_execution_properties = capabilities, remote_execution_use_case = use_case or "tpx-default", remote_cache_enabled = remote_cache_enabled, remote_execution_action_key = remote_execution_action_key, remote_execution_dependencies = re_dependencies, + remote_execution_resource_units = re_resource_units, ) listing_executor = default_executor if listing_capabilities: listing_executor = CommandExecutorConfig( - local_enabled = False, + local_enabled = local_listing_enabled or False, remote_enabled = True, remote_execution_properties = listing_capabilities, remote_execution_use_case = use_case or "tpx-default", remote_cache_enabled = remote_cache_enabled, remote_execution_action_key = remote_execution_action_key, + remote_execution_resource_units = re_resource_units, ) return default_executor, {"listing": listing_executor} diff --git a/prelude/tests/remote_test_execution_toolchain.bzl b/prelude/tests/remote_test_execution_toolchain.bzl index 11e9a047cb9..306c898d8c1 100644 --- a/prelude/tests/remote_test_execution_toolchain.bzl +++ b/prelude/tests/remote_test_execution_toolchain.bzl @@ -12,5 +12,9 @@ RemoteTestExecutionToolchainInfo = provider( # A dictionary of string names to pre-registered profiles. Rules can # use the profile name to references these. "profiles", + # A bool indicating whether the test suites executed by this toolchain + # should be run in a bundle. This makes all tests in a suite run in + # a single RE action as opposed to one action per test. + "default_run_as_bundle", ], ) diff --git a/prelude/tests/tpx_re_legacy.bzl b/prelude/tests/tpx_re_legacy.bzl deleted file mode 100644 index a47c911ebd1..00000000000 --- a/prelude/tests/tpx_re_legacy.bzl +++ /dev/null @@ -1,57 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under both the MIT license found in the -# LICENSE-MIT file in the root directory of this source tree and the Apache -# License, Version 2.0 found in the LICENSE-APACHE file in the root directory -# of this source tree. 
-
-load("@prelude//utils:expect.bzl", "expect")
-
-_RE_ENABLED = "supports_remote_execution"
-_RE_OPTS_LABEL_PREFIX = "re_opts_capabilities="
-_RE_OPTS_KEYS = ["platform", "subplatform", "gpu_name"]
-
-def _parse_re_opts(labels: list[str]) -> [dict[str, str], None]:
-    """
-    Parse out JSON-embedded RE options like:
-    're_opts_capabilities={"platform": gpu-remote-execution, "gpu_name": "A100"}'
-    """
-
-    for label in labels:
-        if label.startswith(_RE_OPTS_LABEL_PREFIX):
-            result = json.decode(label[len(_RE_OPTS_LABEL_PREFIX):])
-            for key in result.keys():
-                expect(key in _RE_OPTS_KEYS, "unexpected key in RE options label: {}", key)
-            return result
-
-    return None
-
-# TODO(agallagher): Parsing RE options via JSON embedded in labels isn't a great
-# UI, and we just do it here to support existing use cases. Ideally, though, we'd
-# present a better UI (e.g. an `re_opts` param for tests) and use that instead.
-def get_re_executor_from_labels(labels: list[str]) -> [CommandExecutorConfig, None]:
-    """
-    Parse legacy RE-enablement test labels and use them to configure a test RE
-    executor to run the test with.
-
-    The UI is best documented at:
-    https://www.internalfb.com/intern/wiki/Remote_Execution/Users/GPU_RE_Contbuild_Migration/
-    """
-
-    # If the special "RE enabled" label isn't present, abort.
-    if _RE_ENABLED not in labels:
-        return None
-
-    # If there's no options found in labels, don't use RE. This diverges from
-    # v1 behavior, but v2+tpx needs some platform to be set and so we probably
-    # want to the toolchain tp provide some exec-platform compatible platform.
-    re_opts = _parse_re_opts(labels)
-    if re_opts == None:
-        return None
-
-    return CommandExecutorConfig(
-        local_enabled = False,
-        remote_enabled = True,
-        remote_execution_properties = re_opts,
-        remote_execution_use_case = "tpx-default",
-    )
diff --git a/prelude/third-party/build.bzl b/prelude/third-party/build.bzl
new file mode 100644
index 00000000000..b194132adaf
--- /dev/null
+++ b/prelude/third-party/build.bzl
@@ -0,0 +1,150 @@
+load("@prelude//:artifacts.bzl", "ArtifactExt", "artifact_ext")
+load("@prelude//:paths.bzl", path_utils = "paths")
+load("@prelude//cxx:cxx_context.bzl", "get_cxx_toolchain_info")
+load("@prelude//cxx:preprocessor.bzl", "CPreprocessorInfo")
+load(
+    "@prelude//linking:shared_libraries.bzl",
+    "SharedLibrary",
+    "gen_shared_libs_action",
+)
+load(
+    "@prelude//python:manifest.bzl",
+    "ManifestInfo",  # @unused Used as a type
+)
+load(":providers.bzl", "ThirdPartyBuild", "ThirdPartyBuildInfo", "third_party_build_info")
+
+def project_from_label(label: Label) -> str:
+    """
+    Generate a unique third-party project name for the given label.
+    """
+    return str(label.raw_target())
+
+def prefix_from_label(label: Label, prefix: str = "/usr/local") -> str:
+    """
+    Generate a unique third-party prefix for the given label.
+    """
+    return path_utils.join(prefix, "{}-{}".format(label.name, sha256(str(label.raw_target()))[:7]))
+
+def create_third_party_build_root(
+        ctx: AnalysisContext,
+        out: str = "__third_party_build__",
+        manifests: list[(str, ManifestInfo)] = [],
+        shared_libs: list[SharedLibrary] = [],
+        cxx_headers: list[CPreprocessorInfo] = [],
+        paths: list[(str, Artifact)] = []) -> ArtifactExt:
+    """
+    Installs components into a unix-y install dir which can be used by other
+    third-party builds.
+    """
+
+    cmd = cmd_args()
+    cmd.add(ctx.attrs._create_third_party_build_root[RunInfo])
+
+    for dst, manifest in manifests:
+        cmd.add(
+            "--manifest",
+            dst,
+            cmd_args(manifest.manifest, hidden = [a for a, _ in manifest.artifacts]),
+        )
+
+    for pps in cxx_headers:
+        for pp in pps.set.value:
+            for hdr in pp.headers:
+                cmd.add("--path", path_utils.join("include", hdr.namespace, hdr.name), hdr.artifact)
+
+    for dst, path in paths:
+        cmd.add("--path", dst, path)
+
+    def gen(actions, output, shared_libs):
+        lines = []
+        if shared_libs:
+            sh_ext = get_cxx_toolchain_info(ctx).linker_info.shared_library_name_format.format("")
+            for soname, shared_lib in shared_libs.items():
+                lines.append(cmd_args("--path", path_utils.join("lib", soname), shared_lib.lib.output))
+
+                # Linkers link `-l<name>` dynamically (by default) by looking for `lib<name>.so`,
+                # so make sure this exists by creating it as a symlink (to the versioned name)
+                # if it doesn't already.
+                if sh_ext in soname and not soname.endswith(sh_ext):
+                    idx = soname.index(sh_ext)
+                    link_name = soname[:idx + 3]
+                    lines.append(cmd_args("--symlink", path_utils.join("lib", link_name), soname))
+        return actions.write(output.as_output(), lines)
+
+    # Add shlibs via argsfile.
+    argsfile = gen_shared_libs_action(
+        actions = ctx.actions,
+        out = "shared_libs_args.txt",
+        shared_libs = shared_libs,
+        gen_action = gen,
+    )
+    cmd.add(cmd_args(argsfile, format = "@{}", hidden = [s.lib.output for s in shared_libs]))
+
+    out = ctx.actions.declare_output(out, dir = True)
+    cmd.add(out.as_output())
+
+    ctx.actions.run(cmd, category = "third_party_build_root")
+
+    return artifact_ext(out)
+
+def create_third_party_build_info(
+        ctx: AnalysisContext,
+        project: str | None = None,
+        prefix: str | None = None,
+        out: str = "__third_party_build__",
+        manifests: list[(str, ManifestInfo)] = [],
+        shared_libs: list[SharedLibrary] = [],
+        cxx_headers: list[CPreprocessorInfo] = [],
+        cxx_header_dirs: list[str] = [],
+        paths: list[(str, Artifact)] = [],
+        deps: list[Dependency] = []) -> ThirdPartyBuildInfo:
+    if project == None:
+        project = project_from_label(ctx.label)
+    if prefix == None:
+        prefix = prefix_from_label(ctx.label)
+
+    root = create_third_party_build_root(
+        ctx = ctx,
+        out = out,
+        manifests = manifests,
+        cxx_headers = cxx_headers,
+        shared_libs = shared_libs,
+        paths = paths,
+    )
+
+    # Build manifest.
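+    # As an illustration (all values hypothetical), the manifest produced below
+    # for a project exporting libfoo.so might look like:
+    #
+    #   {
+    #     "project": "cell//third-party/foo:foo",
+    #     "prefix": "/usr/local/foo-1a2b3c4",
+    #     "c_include_paths": ["include"],
+    #     "cxx_include_paths": ["include"],
+    #     "runtime_lib_paths": ["lib"],
+    #     "libs": ["-lfoo"]
+    #   }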
+ def gen_manifest(actions, output, shared_libs): + manifest = {} + manifest["project"] = project + manifest["prefix"] = prefix + if cxx_header_dirs: + manifest["c_include_paths"] = cxx_header_dirs + manifest["cxx_include_paths"] = cxx_header_dirs + if shared_libs: + manifest["runtime_lib_paths"] = ["lib"] + libs = [] + sh_ext = get_cxx_toolchain_info(ctx).linker_info.shared_library_name_format.format("") + for soname in shared_libs: + if sh_ext in soname: + lib = soname.split(sh_ext)[0].removeprefix("lib") + libs.append("-l{}".format(lib)) + manifest["libs"] = libs + return actions.write_json(output.as_output(), manifest, pretty = True) + + manifest = gen_shared_libs_action( + actions = ctx.actions, + out = out + ".json", + shared_libs = shared_libs, + gen_action = gen_manifest, + ) + + return third_party_build_info( + actions = ctx.actions, + build = ThirdPartyBuild( + project = project, + prefix = prefix, + root = root, + manifest = manifest, + ), + deps = deps, + ) diff --git a/prelude/third-party/hmaptool/BUCK.v2 b/prelude/third-party/hmaptool/BUCK.v2 index cced36f1dcb..4a96edf2e1e 100644 --- a/prelude/third-party/hmaptool/BUCK.v2 +++ b/prelude/third-party/hmaptool/BUCK.v2 @@ -1,10 +1,18 @@ -native.export_file( +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + +prelude = native # Avoid warnings and auto-formatters + +prelude.export_file( name = "_hmaptool", src = "hmaptool", mode = "reference", ) -native.command_alias( +prelude.command_alias( name = "hmaptool", exe = ":_hmaptool", visibility = ["PUBLIC"], diff --git a/prelude/third-party/hmaptool/hmaptool b/prelude/third-party/hmaptool/hmaptool index a85bc517721..e6e99058b4e 100755 --- a/prelude/third-party/hmaptool/hmaptool +++ b/prelude/third-party/hmaptool/hmaptool @@ -118,6 +118,24 @@ class HeaderMap(object): yield (self.get_string(key_idx), self.get_string(prefix_idx) + self.get_string(suffix_idx)) +class StringTable: + def __init__(self): + # A string table offset of 0 is interpreted as an empty bucket, so it's + # important we don't assign an actual string to that offset. + self.table = "\0" + # For the same reason we don't want the empty string having a 0 offset. + self.offsets = {} + + def add(self, string): + offset = self.offsets.get(string) + if offset: + return offset + + offset = len(self.table) + self.table += string + "\0" + self.offsets[string] = offset + return offset + ### def action_dump(name, args): @@ -128,6 +146,9 @@ def action_dump(name, args): parser.add_option("-v", "--verbose", dest="verbose", help="show more verbose output [%default]", action="store_true", default=False) + parser.add_option("--json", dest="json", + help="output as JSON [%default]", + action="store_true", default=False) (opts, args) = parser.parse_args(args) if len(args) != 1: @@ -138,7 +159,6 @@ def action_dump(name, args): hmap = HeaderMap.frompath(path) # Dump all of the buckets. 
- print ('Header Map: %s' % (path,))
 if opts.verbose:
 print ('headermap: %r' % (path,))
 print (' num entries: %d' % (hmap.num_entries,))
@@ -157,7 +177,10 @@ def action_dump(name, args):
 print (" bucket[%d]: %r -> (%r, %r) -- %d" % (
 i, key, prefix, suffix,
 (hmap_hash(key) & (len(hmap.buckets) - 1))))
+ elif opts.json:
+ print(json.dumps({"mappings": dict(hmap.mappings)}, indent=4))
 else:
+ print ('Header Map: %s' % (path,))
 mappings = sorted(hmap.mappings)
 for key,value in mappings:
 print ("%s -> %s" % (key, value))
@@ -190,7 +213,7 @@ def action_write(name, args):
 table = [(0, 0, 0) for i in range(num_buckets)]
 max_value_len = 0
- strtable = "\0"
+ strtable = StringTable()
 for key,value in mappings.items():
 if not isinstance(key, str):
 key = key.decode('utf-8')
@@ -198,17 +221,14 @@
 value = value.decode('utf-8')
 max_value_len = max(max_value_len, len(value))
- key_idx = len(strtable)
- strtable += key + '\0'
+ key_idx = strtable.add(key)
 prefix, suffix = os.path.split(value)
 # This guarantees that prefix + suffix == value in all cases, including when
 # prefix is empty or contains a trailing slash or suffix is empty (hence the use
 # of `len(value) - len(suffix)` instead of just `-len(suffix)`.
 prefix += value[len(prefix) : len(value) - len(suffix)]
- prefix_idx = len(strtable)
- strtable += prefix + '\0'
- suffix_idx = len(strtable)
- strtable += suffix + '\0'
+ prefix_idx = strtable.add(prefix)
+ suffix_idx = strtable.add(suffix)
 hash = hmap_hash(key)
 for i in range(num_buckets):
@@ -236,7 +256,7 @@ def action_write(name, args):
 f.write(struct.pack(header_fmt, *header))
 for bucket in table:
 f.write(struct.pack(bucket_fmt, *bucket))
- f.write(strtable.encode())
+ f.write(strtable.table.encode())
 def action_tovfs(name, args):
 "convert a headermap to a VFS layout"
diff --git a/prelude/third-party/pkgconfig.bzl b/prelude/third-party/pkgconfig.bzl
new file mode 100644
index 00000000000..e9dbc5f122e
--- /dev/null
+++ b/prelude/third-party/pkgconfig.bzl
@@ -0,0 +1,87 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+#
+# This source code is licensed under both the MIT license found in the
+# LICENSE-MIT file in the root directory of this source tree and the Apache
+# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
+# of this source tree.
+
+load("@prelude//:prelude.bzl", "native")
+
+# NB: Meta engineers should not use this! Please use tp2 instead:
+# https://fburl.com/wiki/oyy0fi5j
+#
+# If a system has a package installed and that package provides a `.pc` file,
+# this rule can be used to make that library visible to other rules. The `name`
+# of this rule should be the pkg-config name. For example, if
+# `pkg-config --libs gtest` prints out the flags to link against gtest, then
+# `external_pkgconfig_library(name = "gtest")` would allow other rules to
+# depend on gtest.
+#
+# WARNING: dependencies are not resolved by pkg-config, so these must be specified
+# manually with `deps`. Additionally, ABI/platform differences are not handled
+# by this rule, so be careful not to cache its outputs (e.g. in Remote Execution),
+# to prevent different machines from reusing them.
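The function that follows wires this up as a pair of genrules. A rough Python rendering of what each genrule's shell one-liner does (illustration only; `probe` is not part of the rule):

```python
import subprocess

def probe(package, mode, fallback_flags=None):
    """mode is "--cflags" or "--libs"; returns the text written to $OUT."""
    if fallback_flags is not None:
        # With a fallback, the command first checks `pkg-config --exists`.
        exists = subprocess.run(["pkg-config", "--exists", package]).returncode == 0
        if not exists:
            return " ".join(fallback_flags)
    result = subprocess.run(
        ["pkg-config", mode, package],
        capture_output=True, text=True, check=True,
    )
    return result.stdout.strip()
```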
+def external_pkgconfig_library( + name, + package = None, + visibility = ["PUBLIC"], + labels = [], + default_target_platform = "prelude//platforms:default", + deps = [], + fallback = None): + if package == None: + package = name + + cmd_cflags = "pkg-config --cflags {} > $OUT".format(package) + cmd_libs = "pkg-config --libs {} > $OUT".format(package) + + if fallback != None: + preprocessor_flags = ( + fallback.preprocessor_flags if hasattr(fallback, "preprocessor_flags") else [] + ) + linker_flags = ( + fallback.linker_flags if hasattr(fallback, "linker_flags") else [] + ) + + cmd_cflags = "if pkg-config --exists {}; then {}; else echo {} > $OUT; fi".format( + package, + cmd_cflags, + " ".join(preprocessor_flags), + ) + + cmd_libs = "if pkg-config --exists {}; then {}; else echo {} > $OUT; fi".format( + package, + cmd_libs, + " ".join(linker_flags), + ) + + pkg_config_cflags = name + "__pkg_config_cflags" + native.genrule( + name = pkg_config_cflags, + default_target_platform = default_target_platform, + out = "out", + cmd = cmd_cflags, + remote = False, + ) + + pkg_config_libs = name + "__pkg_config_libs" + native.genrule( + name = pkg_config_libs, + default_target_platform = default_target_platform, + out = "out", + cmd = cmd_libs, + remote = False, + ) + + labels = list(labels) + labels.append("third-party:pkg-config:{}".format(package)) + + native.prebuilt_cxx_library( + name = name, + default_target_platform = default_target_platform, + visibility = visibility, + exported_preprocessor_flags = ["@$(location :{})".format(pkg_config_cflags)], + exported_linker_flags = ["@$(location :{})".format(pkg_config_libs)], + exported_deps = deps, + labels = labels, + ) diff --git a/prelude/third-party/providers.bzl b/prelude/third-party/providers.bzl new file mode 100644 index 00000000000..27096e1ae32 --- /dev/null +++ b/prelude/third-party/providers.bzl @@ -0,0 +1,53 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load("@prelude//:artifacts.bzl", "ArtifactExt") + +ThirdPartyBuild = record( + # A logical project name for the project, currently used for logging. + project = field(str), + # The directory containing the build output. + root = field(ArtifactExt), + # The prefix to install the build output. + prefix = field(str, "/"), + # A manifest of build env settings to use to build against this build. + manifest = field(Artifact | None, None), + # Environment variables to set to build against this project. + # TODO(agallagher): Can this move into the manifest? 
+ exported_env = field(dict[str, str], {}), +) + +# Work-around for buck2 bug causing "transitive values must be of the same +# transitive set type" errors: +# https://fb.prod.workplace.com/groups/buck2users/posts/3637287806527574/ +ThirdPartyBuildTSet = transitive_set() +ThirdPartyBuildInfo = provider(fields = { + "build": provider_field(ThirdPartyBuild | None), + "_tset": provider_field(ThirdPartyBuildTSet), +}) + +def third_party_build_info( + actions, + build: [ThirdPartyBuild, None] = None, + children: list[ThirdPartyBuildInfo] = [], + deps: list[Dependency] = []) -> ThirdPartyBuildInfo: + kwargs = {} + if build != None: + kwargs["value"] = build + if deps or children: + kwargs["children"] = [ + child._tset + for child in children + ] + [ + dep[ThirdPartyBuildInfo]._tset + for dep in deps + if ThirdPartyBuildInfo in dep + ] + return ThirdPartyBuildInfo( + build = build, + _tset = actions.tset(ThirdPartyBuildTSet, **kwargs), + ) diff --git a/prelude/third-party/tools/BUCK.v2 b/prelude/third-party/tools/BUCK.v2 new file mode 100644 index 00000000000..e241143bff8 --- /dev/null +++ b/prelude/third-party/tools/BUCK.v2 @@ -0,0 +1,11 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + +source_listing() + +prelude = native + +prelude.python_bootstrap_binary( + name = "create_build", + main = "create_build.py", + visibility = ["PUBLIC"], +) diff --git a/prelude/third-party/tools/create_build.py b/prelude/third-party/tools/create_build.py new file mode 100644 index 00000000000..7699004c49b --- /dev/null +++ b/prelude/third-party/tools/create_build.py @@ -0,0 +1,61 @@ +import argparse +import shutil +import stat +import json +import sys +import os + + +# Copy only file contents and exec permission bit. +def _copy(src, dst, *, follow_symlinks=True): + shutil.copyfile(src, dst, follow_symlinks=False) + src_mode = os.lstat(src).st_mode + dst_mode = os.lstat(dst).st_mode + os.chmod( + dst, + dst_mode | (src_mode & (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)), + follow_symlinks=False, + ) + + +def main(argv): + parser = argparse.ArgumentParser(fromfile_prefix_chars="@") + parser.add_argument( + "--manifest", dest="manifests", nargs=2, action="append", default=[] + ) + parser.add_argument("--path", dest="paths", nargs=2, action="append", default=[]) + parser.add_argument( + "--symlink", dest="symlinks", nargs=2, action="append", default=[] + ) + parser.add_argument("output") + args = parser.parse_args(argv[1:]) + + os.makedirs(args.output) + + all_paths = [] + all_paths.extend(args.paths) + + for bdst, manifest in args.manifests: + with open(manifest) as mf: + manifest = json.load(mf) + for dst, src, _ in manifest: + dst = os.path.join(bdst, dst) + all_paths.append((dst, src)) + + for dst, src in all_paths: + fdst = os.path.join(args.output, dst) + os.makedirs(os.path.dirname(fdst), exist_ok=True) + if os.path.isdir(src): + shutil.copytree( + src, fdst, symlinks=True, dirs_exist_ok=True, copy_function=_copy + ) + else: + shutil.copy(src, fdst) + + for dst, target in args.symlinks: + fdst = os.path.join(args.output, dst) + os.makedirs(os.path.dirname(fdst), exist_ok=True) + os.symlink(target, fdst) + + +sys.exit(main(sys.argv)) diff --git a/prelude/toolchains/apple/xcode_version_checker/BUCK.v2 b/prelude/toolchains/apple/xcode_version_checker/BUCK.v2 index 234076e4b39..383dc2dfa00 100644 --- a/prelude/toolchains/apple/xcode_version_checker/BUCK.v2 +++ b/prelude/toolchains/apple/xcode_version_checker/BUCK.v2 @@ -1,6 +1,10 @@ -load("@fbsource//tools/build_defs:fb_native_wrapper.bzl", 
"fb_native") +load("@prelude//utils:source_listing.bzl", "source_listing") -fb_native.export_file( +oncall("build_infra") + +source_listing() + +export_file( name = "xcode_version_checker", labels = ["buck2-only"], mode = "reference", diff --git a/prelude/toolchains/apple/xcode_version_checker/src/xcode_version_checks.h b/prelude/toolchains/apple/xcode_version_checker/src/xcode_version_checks.h index d84579e95e3..7bbb1e907dd 100644 --- a/prelude/toolchains/apple/xcode_version_checker/src/xcode_version_checks.h +++ b/prelude/toolchains/apple/xcode_version_checker/src/xcode_version_checks.h @@ -6,9 +6,7 @@ * License, Version 2.0 found in the LICENSE-APACHE file in the root directory * of this source tree. */ - -#ifndef __XCODE_VERSION_CHECKS_H__ -#define __XCODE_VERSION_CHECKS_H__ +#pragma once #import @@ -35,5 +33,3 @@ BOOL checkVersionPlistShortVersionMatch( NSString* expectedShortVersion, BOOL logComparisonFailure); BOOL checkXcodeShortVersionMatch(NSString* expectedShortVersion); - -#endif diff --git a/prelude/toolchains/conan/BUCK b/prelude/toolchains/conan/BUCK index 8c665881e67..0edb38d5db9 100644 --- a/prelude/toolchains/conan/BUCK +++ b/prelude/toolchains/conan/BUCK @@ -1,3 +1,9 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + export_file( name = "buckler", src = "buckler/conanfile.py", diff --git a/prelude/toolchains/conan/conan_common.py b/prelude/toolchains/conan/conan_common.py index 7abe794c77b..7f324df0cd9 100644 --- a/prelude/toolchains/conan/conan_common.py +++ b/prelude/toolchains/conan/conan_common.py @@ -161,7 +161,7 @@ def conan_env(user_home=None, trace_log=None): # env["CONAN_REVISIONS_ENABLED"] = "1" # Prevent over-allocation. - # TODO[AH] Support parallized package builds and set an appropriate action + # TODO[AH] Support parallelized package builds and set an appropriate action # weight using the `weight` parameter to `ctx.actions.run`. # Note that not all Conan packages respect the `CONAN_CPU_COUNT` setting. env["CONAN_CPU_COUNT"] = "1" diff --git a/prelude/toolchains/conan/conan_update.py b/prelude/toolchains/conan/conan_update.py index 30986dfed6e..f52f75beded 100644 --- a/prelude/toolchains/conan/conan_update.py +++ b/prelude/toolchains/conan/conan_update.py @@ -1,4 +1,11 @@ #!/usr/bin/env python3 +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
+ import argparse import os import shutil @@ -19,7 +26,7 @@ def write_lockfile(lockfile, lockfile_out): def write_targets(update_label, lock_generate, conan_generate, targets_out): header = """\ -# @generated +# {at}generated # Update using `buck2 run {update_label}` load( @@ -28,64 +35,74 @@ def write_targets(update_label, lock_generate, conan_generate, targets_out): "conan_dep", "conan_package", ) -""".format(update_label = update_label) +""".format( + at="@", update_label=update_label + ) os.makedirs(os.path.dirname(targets_out), exist_ok=True) with open(targets_out, "w") as outf: outf.write(header) with open(lock_generate, "r") as inf: - for l in inf: - outf.write(l) + for x in inf: + outf.write(x) with open(conan_generate, "r") as inf: - for l in inf: - outf.write(l) + for x in inf: + outf.write(x) def main(): parser = argparse.ArgumentParser( - prog = "conan_update", - description = "Update the Conan lock-file and the Buck2 package imports.") + prog="conan_update", + description="Update the Conan lock-file and the Buck2 package imports.", + ) parser.add_argument( - "--update-label", - metavar="LABEL", - type=str, - required=True, - help="The label to the target to run this program.") + "--update-label", + metavar="LABEL", + type=str, + required=True, + help="The label to the target to run this program.", + ) parser.add_argument( - "--lockfile", - metavar="FILE", - type=str, - required=True, - help="Path to the lockfile to copy to the repository.") + "--lockfile", + metavar="FILE", + type=str, + required=True, + help="Path to the lockfile to copy to the repository.", + ) parser.add_argument( - "--lock-targets", - metavar="FILE", - type=str, - required=True, - help="Path to the targets file generated from the lock file.") + "--lock-targets", + metavar="FILE", + type=str, + required=True, + help="Path to the targets file generated from the lock file.", + ) parser.add_argument( - "--conan-targets", - metavar="FILE", - type=str, - required=True, - help="Path to the targets file generated by Buckler.") + "--conan-targets", + metavar="FILE", + type=str, + required=True, + help="Path to the targets file generated by Buckler.", + ) parser.add_argument( - "--conanfile", - metavar="FILE", - type=str, - required=True, - help="Path to the Conanfile.") + "--conanfile", + metavar="FILE", + type=str, + required=True, + help="Path to the Conanfile.", + ) parser.add_argument( - "--lockfile-out", - metavar="FILE", - type=str, - required=True, - help="Name of the Conan lock-file to generate, relative to the Conanfile.") + "--lockfile-out", + metavar="FILE", + type=str, + required=True, + help="Name of the Conan lock-file to generate, relative to the Conanfile.", + ) parser.add_argument( - "--targets-out", - metavar="FILE", - type=str, - required=True, - help="Name of the Starlark file to generate, relative to the Conanfile.") + "--targets-out", + metavar="FILE", + type=str, + required=True, + help="Name of the Starlark file to generate, relative to the Conanfile.", + ) args = parser.parse_args() root = find_root() diff --git a/prelude/toolchains/conan/defs.bzl b/prelude/toolchains/conan/defs.bzl index 029ca6530cb..8f65db3dfac 100644 --- a/prelude/toolchains/conan/defs.bzl +++ b/prelude/toolchains/conan/defs.bzl @@ -431,21 +431,25 @@ def _conan_generate_impl(ctx: AnalysisContext) -> list[Provider]: trace_log = ctx.actions.declare_output("trace.log") targets_out = ctx.actions.declare_output(ctx.label.name + ".bzl") - cmd = cmd_args([conan_generate]) - cmd.add(["--conan", conan_toolchain.conan]) - 
cmd.add(["--conan-init", conan_init.user_home]) - cmd.hidden(conan_init.profile.config) # The profile is inlined in the lockfile. - cmd.hidden(conan_init.profile.inputs) - cmd.add(["--buckler", ctx.attrs._buckler]) - cmd.add(["--install-folder", install_folder.as_output()]) - cmd.add(["--output-folder", output_folder.as_output()]) - cmd.add(["--user-home", user_home.as_output()]) - cmd.add(["--manifests", manifests.as_output()]) - cmd.add(["--install-info", install_info.as_output()]) - cmd.add(["--trace-file", trace_log.as_output()]) - cmd.add(["--conanfile", ctx.attrs.conanfile]) - cmd.add(["--lockfile", ctx.attrs.lockfile]) - cmd.add(["--targets-out", targets_out.as_output()]) + cmd = cmd_args( + [conan_generate] + + ["--conan", conan_toolchain.conan] + + ["--conan-init", conan_init.user_home] + + ["--buckler", ctx.attrs._buckler] + + ["--install-folder", install_folder.as_output()] + + ["--output-folder", output_folder.as_output()] + + ["--user-home", user_home.as_output()] + + ["--manifests", manifests.as_output()] + + ["--install-info", install_info.as_output()] + + ["--trace-file", trace_log.as_output()] + + ["--conanfile", ctx.attrs.conanfile] + + ["--lockfile", ctx.attrs.lockfile] + + ["--targets-out", targets_out.as_output()], + hidden = [ + conan_init.profile.config, # The profile is inlined in the lockfile. + conan_init.profile.inputs, + ], + ) ctx.actions.run(cmd, category = "conan_build") return [ @@ -482,10 +486,12 @@ def _conan_init_impl(ctx: AnalysisContext) -> list[Provider]: user_home = ctx.actions.declare_output("user-home") trace_log = ctx.actions.declare_output("trace.log") - cmd = cmd_args([conan_init]) - cmd.add(["--conan", conan_toolchain.conan]) - cmd.add(["--user-home", user_home.as_output()]) - cmd.add(["--trace-file", trace_log.as_output()]) + cmd = cmd_args( + [conan_init] + + ["--conan", conan_toolchain.conan] + + ["--user-home", user_home.as_output()] + + ["--trace-file", trace_log.as_output()], + ) ctx.actions.run(cmd, category = "conan_init") return [ @@ -522,17 +528,18 @@ def _conan_lock_impl(ctx: AnalysisContext) -> list[Provider]: user_home = ctx.actions.declare_output("user-home") trace_log = ctx.actions.declare_output("trace.log") - cmd = cmd_args([conan_lock]) - cmd.add(["--conan", conan_toolchain.conan]) - cmd.add(["--conan-init", conan_init.user_home]) - cmd.add(["--profile", conan_init.profile.config]) - cmd.hidden(conan_init.profile.inputs) - cmd.add(["--user-home", user_home.as_output()]) - cmd.add(["--trace-file", trace_log.as_output()]) - cmd.add(["--conanfile", ctx.attrs.conanfile]) - cmd.add(["--lockfile-out", lockfile_out.as_output()]) - if ctx.attrs.lockfile: - cmd.add(["--lockfile", ctx.attrs.lockfile]) + cmd = cmd_args( + [conan_lock] + + ["--conan", conan_toolchain.conan] + + ["--conan-init", conan_init.user_home] + + ["--profile", conan_init.profile.config] + + ["--user-home", user_home.as_output()] + + ["--trace-file", trace_log.as_output()] + + ["--conanfile", ctx.attrs.conanfile] + + ["--lockfile-out", lockfile_out.as_output()] + + (["--lockfile", ctx.attrs.lockfile] if ctx.attrs.lockfile else []), + hidden = conan_init.profile.inputs, + ) ctx.actions.run(cmd, category = "conan_lock") return [ @@ -571,22 +578,26 @@ def _conan_package_impl(ctx: AnalysisContext) -> list[Provider]: cache_out = ctx.actions.declare_output("cache-out") package_out = ctx.actions.declare_output("package") - cmd = cmd_args([conan_package]) - cmd.add(["--conan", conan_toolchain.conan]) - cmd.add(["--conan-init", conan_init.user_home]) - 
cmd.hidden(conan_init.profile.config) # The profile is inlined in the lockfile. - cmd.hidden(conan_init.profile.inputs) - cmd.add(["--lockfile", ctx.attrs.lockfile]) - cmd.add(["--reference", ctx.attrs.reference]) - cmd.add(["--package-id", ctx.attrs.package_id]) - cmd.add(["--install-folder", install_folder.as_output()]) - cmd.add(["--output-folder", output_folder.as_output()]) - cmd.add(["--user-home", user_home.as_output()]) - cmd.add(["--manifests", manifests.as_output()]) - cmd.add(["--install-info", install_info.as_output()]) - cmd.add(["--trace-file", trace_log.as_output()]) - cmd.add(["--cache-out", cache_out.as_output()]) - cmd.add(["--package-out", package_out.as_output()]) + cmd = cmd_args( + [conan_package] + + ["--conan", conan_toolchain.conan] + + ["--conan-init", conan_init.user_home] + + ["--lockfile", ctx.attrs.lockfile] + + ["--reference", ctx.attrs.reference] + + ["--package-id", ctx.attrs.package_id] + + ["--install-folder", install_folder.as_output()] + + ["--output-folder", output_folder.as_output()] + + ["--user-home", user_home.as_output()] + + ["--manifests", manifests.as_output()] + + ["--install-info", install_info.as_output()] + + ["--trace-file", trace_log.as_output()] + + ["--cache-out", cache_out.as_output()] + + ["--package-out", package_out.as_output()], + hidden = [ + conan_init.profile.config, # The profile is inlined in the lockfile. + conan_init.profile.inputs, + ], + ) # TODO[AH] Do we need to separate deps and build_deps? # This may become necessary for cross-compilation support. @@ -638,7 +649,7 @@ conan_package = rule( doc = "Build a single Conan package.", ) -def _profile_env_var(name, value): +def _profile_env_var(name, value) -> cmd_args: # TODO[AH] Do we need `quote = "shell"` here? # Setting it causes Buck2 to escape the `$PROFILE_DIR` prefix set in the # very end which causes failures in Conan package builds. @@ -651,9 +662,14 @@ def _make_wrapper_script(ctx, name, tool): cmd_args([ "#!/bin/sh", '_SCRIPTDIR=`dirname "$0"`', - cmd_args("exec", tool, '"$@"', delimiter = " ") - .relative_to(wrapper, parent = 1) - .absolute_prefix('"$_SCRIPTDIR"/'), + cmd_args( + "exec", + tool, + '"$@"', + delimiter = " ", + relative_to = (wrapper, 1), + absolute_prefix = '"$_SCRIPTDIR"/', + ), ]), allow_args = True, is_executable = True, @@ -670,57 +686,60 @@ def _profile_env_tool(ctx, name, tool): that configured as full command lines. """ wrapper, inputs = _make_wrapper_script(ctx, name, tool) - return _profile_env_var(name, wrapper).hidden(tool).hidden(inputs) + return cmd_args(_profile_env_var(name, wrapper), hidden = [tool, inputs]) def _conan_profile_impl(ctx: AnalysisContext) -> list[Provider]: cxx = ctx.attrs._cxx_toolchain[CxxToolchainInfo] - content = cmd_args() + content = [] - content.add("[settings]") - content.add(cmd_args(ctx.attrs.arch, format = "arch={}")) - content.add(cmd_args(ctx.attrs.os, format = "os={}")) - content.add(cmd_args(ctx.attrs.build_type, format = "build_type={}")) + content.append("[settings]") + content.append(cmd_args(ctx.attrs.arch, format = "arch={}")) + content.append(cmd_args(ctx.attrs.os, format = "os={}")) + content.append(cmd_args(ctx.attrs.build_type, format = "build_type={}")) # TODO[AH] Auto-generate the compiler setting based on the toolchain. # Needs a translation of CxxToolProviderType to compiler setting. 
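The `_make_wrapper_script` helper above is worth pausing on: it emits a shell shim whose tool path is rewritten relative to the shim's own directory, so the Conan profile stays relocatable. A Python sketch of the script it ends up writing (`write_wrapper` is our name; the script body mirrors the cmd_args construction, assuming `relative_to`/`absolute_prefix` resolve the tool to a path under the script's directory):

```python
import os
import stat

def write_wrapper(path, tool_relpath):
    script = "\n".join([
        "#!/bin/sh",
        '_SCRIPTDIR=`dirname "$0"`',
        # Tool path prefixed with "$_SCRIPTDIR"/ so the shim works no matter
        # which directory Conan invokes it from.
        'exec "$_SCRIPTDIR"/{} "$@"'.format(tool_relpath),
    ])
    with open(path, "w") as f:
        f.write(script + "\n")
    mode = os.stat(path).st_mode
    os.chmod(path, mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
```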
- content.add(cmd_args(ctx.attrs.compiler, format = "compiler={}")) - content.add(cmd_args(ctx.attrs.compiler_version, format = "compiler.version={}")) - content.add(cmd_args(ctx.attrs.compiler_libcxx, format = "compiler.libcxx={}")) + content.append(cmd_args(ctx.attrs.compiler, format = "compiler={}")) + content.append(cmd_args(ctx.attrs.compiler_version, format = "compiler.version={}")) + content.append(cmd_args(ctx.attrs.compiler_libcxx, format = "compiler.libcxx={}")) - content.add("") - content.add("[env]") - content.add(_profile_env_var("CMAKE_FIND_ROOT_PATH", "")) + content.append("") + content.append("[env]") + content.append(_profile_env_var("CMAKE_FIND_ROOT_PATH", "")) # TODO[AH] Define CMAKE_SYSROOT if needed. # TODO[AH] Define target CHOST for cross-compilation - content.add(_profile_env_tool(ctx, "AR", cxx.linker_info.archiver)) + content.append(_profile_env_tool(ctx, "AR", cxx.linker_info.archiver)) if cxx.as_compiler_info: - content.add(_profile_env_tool(ctx, "AS", cxx.as_compiler_info.compiler)) + content.append(_profile_env_tool(ctx, "AS", cxx.as_compiler_info.compiler)) # TODO[AH] Use asm_compiler_info for Windows if cxx.binary_utilities_info: if cxx.binary_utilities_info.nm: - content.add(_profile_env_tool(ctx, "NM", cxx.binary_utilities_info.nm)) + content.append(_profile_env_tool(ctx, "NM", cxx.binary_utilities_info.nm)) if cxx.binary_utilities_info.ranlib: - content.add(_profile_env_tool(ctx, "RANLIB", cxx.binary_utilities_info.ranlib)) + content.append(_profile_env_tool(ctx, "RANLIB", cxx.binary_utilities_info.ranlib)) if cxx.binary_utilities_info.strip: - content.add(_profile_env_tool(ctx, "STRIP", cxx.binary_utilities_info.strip)) + content.append(_profile_env_tool(ctx, "STRIP", cxx.binary_utilities_info.strip)) if cxx.c_compiler_info: - content.add(_profile_env_tool(ctx, "CC", cxx.c_compiler_info.compiler)) - content.add(_profile_env_var("CFLAGS", cxx.c_compiler_info.compiler_flags)) + content.append(_profile_env_tool(ctx, "CC", cxx.c_compiler_info.compiler)) + content.append(_profile_env_var("CFLAGS", cxx.c_compiler_info.compiler_flags)) if cxx.cxx_compiler_info: - content.add(_profile_env_tool(ctx, "CXX", cxx.cxx_compiler_info.compiler)) - content.add(_profile_env_var("CXXFLAGS", cxx.cxx_compiler_info.compiler_flags)) + content.append(_profile_env_tool(ctx, "CXX", cxx.cxx_compiler_info.compiler)) + content.append(_profile_env_var("CXXFLAGS", cxx.cxx_compiler_info.compiler_flags)) output = ctx.actions.declare_output(ctx.label.name) - content.relative_to(output, parent = 1) - content.absolute_prefix("$PROFILE_DIR/") + content = cmd_args( + content, + relative_to = (output, 1), + absolute_prefix = "$PROFILE_DIR/", + ) _, args_inputs = ctx.actions.write(output, content, allow_args = True) return [ DefaultInfo(default_outputs = [output]), - ConanProfileInfo(config = output, inputs = content.hidden(args_inputs)), + ConanProfileInfo(config = output, inputs = cmd_args(content, hidden = args_inputs)), ] conan_profile = rule( @@ -740,14 +759,16 @@ conan_profile = rule( def _conan_update_impl(ctx: AnalysisContext) -> list[Provider]: conan_update = ctx.attrs._conan_update[RunInfo] - cmd = cmd_args([conan_update]) - cmd.add(["--update-label", str(ctx.label.raw_target())]) - cmd.add(["--lockfile", ctx.attrs.lockfile]) - cmd.add(["--lock-targets", ctx.attrs.lock_generate]) - cmd.add(["--conan-targets", ctx.attrs.conan_generate]) - cmd.add(["--conanfile", ctx.attrs.conanfile]) - cmd.add(["--lockfile-out", ctx.attrs.lockfile_name]) - cmd.add(["--targets-out", 
ctx.attrs.targets_name]) + cmd = cmd_args( + [conan_update] + + ["--update-label", str(ctx.label.raw_target())] + + ["--lockfile", ctx.attrs.lockfile] + + ["--lock-targets", ctx.attrs.lock_generate] + + ["--conan-targets", ctx.attrs.conan_generate] + + ["--conanfile", ctx.attrs.conanfile] + + ["--lockfile-out", ctx.attrs.lockfile_name] + + ["--targets-out", ctx.attrs.targets_name], + ) return [ DefaultInfo(default_outputs = []), @@ -773,10 +794,12 @@ def _lock_generate_impl(ctx: AnalysisContext) -> list[Provider]: targets_out = ctx.actions.declare_output(ctx.label.name + ".bzl") - cmd = cmd_args([lock_generate]) - cmd.add(["--lockfile", ctx.attrs.lockfile]) - cmd.add(["--lockfile-label", str(ctx.attrs.lockfile.owner.raw_target())]) - cmd.add(["--targets-out", targets_out.as_output()]) + cmd = cmd_args( + [lock_generate] + + ["--lockfile", ctx.attrs.lockfile] + + ["--lockfile-label", str(ctx.attrs.lockfile.owner.raw_target())] + + ["--targets-out", targets_out.as_output()], + ) ctx.actions.run(cmd, category = "conan_generate") return [ diff --git a/prelude/toolchains/cxx.bzl b/prelude/toolchains/cxx.bzl index d1b1dc15927..93baa02425f 100644 --- a/prelude/toolchains/cxx.bzl +++ b/prelude/toolchains/cxx.bzl @@ -11,9 +11,11 @@ load( "CCompilerInfo", "CvtresCompilerInfo", "CxxCompilerInfo", + "CxxInternalTools", "CxxPlatformInfo", "CxxToolchainInfo", "LinkerInfo", + "LinkerType", "PicBehavior", "RcCompilerInfo", "ShlibInterfacesMode", @@ -22,53 +24,79 @@ load("@prelude//cxx:headers.bzl", "HeaderMode") load("@prelude//cxx:linker.bzl", "is_pdb_generated") load("@prelude//linking:link_info.bzl", "LinkOrdering", "LinkStyle") load("@prelude//linking:lto.bzl", "LtoMode") -load("@prelude//toolchains/msvc:tools.bzl", "VisualStudio") -load("@prelude//utils:cmd_script.bzl", "ScriptOs", "cmd_script") +load("@prelude//os_lookup:defs.bzl", "OsLookup") +load("@prelude//decls/common.bzl", "buck") + +CxxToolsInfo = provider( + fields = { + "archiver": provider_field(typing.Any, default = None), + "archiver_type": provider_field(typing.Any, default = None), + "asm_compiler": provider_field(typing.Any, default = None), + "asm_compiler_type": provider_field(typing.Any, default = None), + "compiler": provider_field(typing.Any, default = None), + "compiler_type": provider_field(typing.Any, default = None), + "cvtres_compiler": provider_field(typing.Any, default = None), + "cxx_compiler": provider_field(typing.Any, default = None), + "linker": provider_field(typing.Any, default = None), + "linker_type": LinkerType, + "rc_compiler": provider_field(typing.Any, default = None), + }, +) + +def _legacy_equivalent_cxx_tools_info_windows(ctx: AnalysisContext, default_toolchain: CxxToolsInfo) -> CxxToolsInfo: + return CxxToolsInfo( + compiler = default_toolchain.compiler if ctx.attrs.compiler == None or ctx.attrs.compiler == "cl.exe" else ctx.attrs.compiler, + compiler_type = default_toolchain.compiler_type if ctx.attrs.compiler_type == None else ctx.attrs.compiler_type, + cxx_compiler = default_toolchain.cxx_compiler if ctx.attrs.compiler == None or ctx.attrs.compiler == "cl.exe" else ctx.attrs.compiler, + asm_compiler = default_toolchain.asm_compiler, + asm_compiler_type = default_toolchain.asm_compiler_type, + rc_compiler = default_toolchain.rc_compiler if ctx.attrs.rc_compiler == None or ctx.attrs.rc_compiler == "rc.exe" else ctx.attrs.rc_compiler, + cvtres_compiler = default_toolchain.cvtres_compiler if ctx.attrs.cvtres_compiler == None or ctx.attrs.cvtres_compiler == "cvtres.exe" else ctx.attrs.cvtres_compiler, + 
archiver = default_toolchain.archiver, + archiver_type = default_toolchain.archiver_type, + linker = default_toolchain.linker if ctx.attrs.linker == None or ctx.attrs.linker == "link.exe" else ctx.attrs.linker, + linker_type = default_toolchain.linker_type, + ) + +def _legacy_equivalent_cxx_tools_info_non_windows(ctx: AnalysisContext, default_toolchain: CxxToolsInfo) -> CxxToolsInfo: + return CxxToolsInfo( + compiler = default_toolchain.compiler if ctx.attrs.compiler == None else ctx.attrs.compiler, + compiler_type = default_toolchain.compiler_type if ctx.attrs.compiler_type == None else ctx.attrs.compiler_type, + cxx_compiler = default_toolchain.cxx_compiler if ctx.attrs.cxx_compiler == None else ctx.attrs.cxx_compiler, + asm_compiler = default_toolchain.asm_compiler if ctx.attrs.compiler == None else ctx.attrs.compiler, + asm_compiler_type = default_toolchain.asm_compiler_type if ctx.attrs.compiler_type == None else ctx.attrs.compiler_type, + rc_compiler = default_toolchain.rc_compiler if ctx.attrs.rc_compiler == None else ctx.attrs.rc_compiler, + cvtres_compiler = default_toolchain.cvtres_compiler if ctx.attrs.cvtres_compiler == None else ctx.attrs.cvtres_compiler, + archiver = default_toolchain.archiver, + archiver_type = default_toolchain.archiver_type, + linker = default_toolchain.linker if ctx.attrs.linker == None else ctx.attrs.linker, + linker_type = default_toolchain.linker_type, + ) def _system_cxx_toolchain_impl(ctx: AnalysisContext): """ A very simple toolchain that is hardcoded to the current environment. """ - archiver_args = ["ar"] - archiver_type = "gnu" - archiver_supports_argfiles = True - asm_compiler = ctx.attrs.compiler - asm_compiler_type = ctx.attrs.compiler_type - compiler = ctx.attrs.compiler - cxx_compiler = ctx.attrs.cxx_compiler - cvtres_compiler = ctx.attrs.cvtres_compiler - rc_compiler = ctx.attrs.rc_compiler - linker = ctx.attrs.linker - linker_type = "gnu" - pic_behavior = PicBehavior("supported") - binary_extension = "" - object_file_extension = "o" - static_library_extension = "a" - shared_library_name_default_prefix = "lib" - shared_library_name_format = "{}.so" - shared_library_versioned_name_format = "{}.so.{}" - additional_linker_flags = [] - if host_info().os.is_macos: - archiver_supports_argfiles = False - linker_type = "darwin" - pic_behavior = PicBehavior("always_enabled") - elif host_info().os.is_windows: - msvc_tools = ctx.attrs.msvc_tools[VisualStudio] - archiver_args = [msvc_tools.lib_exe] - archiver_type = "windows" - asm_compiler = msvc_tools.ml64_exe - asm_compiler_type = "windows_ml64" - if compiler == "cl.exe": - compiler = msvc_tools.cl_exe - cxx_compiler = compiler - if cvtres_compiler == "cvtres.exe": - cvtres_compiler = msvc_tools.cvtres_exe - if rc_compiler == "rc.exe": - rc_compiler = msvc_tools.rc_exe - if linker == "link.exe": - linker = msvc_tools.link_exe - linker = _windows_linker_wrapper(ctx, linker) - linker_type = "windows" + + os = ctx.attrs._target_os_type[OsLookup].platform + arch_name = ctx.attrs._target_os_type[OsLookup].cpu + cxx_tools_info = ctx.attrs._cxx_tools_info[CxxToolsInfo] + cxx_tools_info = _legacy_equivalent_cxx_tools_info_windows(ctx, cxx_tools_info) if os == "windows" else _legacy_equivalent_cxx_tools_info_non_windows(ctx, cxx_tools_info) + target_name = os + if arch_name: + target_name += "-" + arch_name + return _cxx_toolchain_from_cxx_tools_info(ctx, cxx_tools_info, target_name) + +def _cxx_tools_info_toolchain_impl(ctx: AnalysisContext): + return _cxx_toolchain_from_cxx_tools_info(ctx, 
ctx.attrs.cxx_tools_info[CxxToolsInfo]) + +def _cxx_toolchain_from_cxx_tools_info(ctx: AnalysisContext, cxx_tools_info: CxxToolsInfo, target_name = "x86_64"): + os = ctx.attrs._target_os_type[OsLookup].platform + archiver_supports_argfiles = os != "macos" + additional_linker_flags = ["-fuse-ld=lld"] if os == "linux" and cxx_tools_info.linker != "g++" and cxx_tools_info.cxx_compiler != "g++" else [] + + if os == "windows": + linker_type = LinkerType("windows") binary_extension = "exe" object_file_extension = "obj" static_library_extension = "lib" @@ -76,12 +104,22 @@ def _system_cxx_toolchain_impl(ctx: AnalysisContext): shared_library_name_format = "{}.dll" shared_library_versioned_name_format = "{}.dll" pic_behavior = PicBehavior("not_supported") - elif ctx.attrs.linker == "g++" or ctx.attrs.cxx_compiler == "g++": - pass else: - additional_linker_flags = ["-fuse-ld=lld"] + binary_extension = "" + object_file_extension = "o" + static_library_extension = "a" + shared_library_name_default_prefix = "lib" + shared_library_name_format = "{}.so" + shared_library_versioned_name_format = "{}.so.{}" - if ctx.attrs.compiler_type == "clang": + if os == "macos": + linker_type = LinkerType("darwin") + pic_behavior = PicBehavior("always_enabled") + else: + linker_type = LinkerType("gnu") + pic_behavior = PicBehavior("supported") + + if cxx_tools_info.compiler_type == "clang": llvm_link = RunInfo(args = ["llvm-link"]) else: llvm_link = None @@ -89,18 +127,19 @@ def _system_cxx_toolchain_impl(ctx: AnalysisContext): return [ DefaultInfo(), CxxToolchainInfo( - mk_comp_db = ctx.attrs.make_comp_db, + internal_tools = ctx.attrs._internal_tools[CxxInternalTools], linker_info = LinkerInfo( - linker = RunInfo(args = linker), + linker = _run_info(cxx_tools_info.linker), linker_flags = additional_linker_flags + ctx.attrs.link_flags, post_linker_flags = ctx.attrs.post_link_flags, - archiver = RunInfo(args = archiver_args), - archiver_type = archiver_type, + archiver = _run_info(cxx_tools_info.archiver), + archiver_type = cxx_tools_info.archiver_type, archiver_supports_argfiles = archiver_supports_argfiles, generate_linker_maps = False, lto_mode = LtoMode("none"), type = linker_type, link_binaries_locally = True, + link_libraries_locally = True, archive_objects_locally = True, use_archiver_flags = True, static_dep_runtime_ld_flags = [], @@ -118,102 +157,110 @@ def _system_cxx_toolchain_impl(ctx: AnalysisContext): static_library_extension = static_library_extension, force_full_hybrid_if_capable = False, is_pdb_generated = is_pdb_generated(linker_type, ctx.attrs.link_flags), - produce_interface_from_stub_shared_library = True, link_ordering = ctx.attrs.link_ordering, ), bolt_enabled = False, binary_utilities_info = BinaryUtilitiesInfo( nm = RunInfo(args = ["nm"]), objcopy = RunInfo(args = ["objcopy"]), + objdump = RunInfo(args = ["objdump"]), ranlib = RunInfo(args = ["ranlib"]), strip = RunInfo(args = ["strip"]), dwp = None, bolt_msdk = None, ), cxx_compiler_info = CxxCompilerInfo( - compiler = RunInfo(args = [cxx_compiler]), + compiler = _run_info(cxx_tools_info.cxx_compiler), preprocessor_flags = [], compiler_flags = ctx.attrs.cxx_flags, - compiler_type = ctx.attrs.compiler_type, + compiler_type = cxx_tools_info.compiler_type, ), c_compiler_info = CCompilerInfo( - compiler = RunInfo(args = [compiler]), + compiler = _run_info(cxx_tools_info.compiler), preprocessor_flags = [], compiler_flags = ctx.attrs.c_flags, - compiler_type = ctx.attrs.compiler_type, + compiler_type = cxx_tools_info.compiler_type, ), 
as_compiler_info = CCompilerInfo( - compiler = RunInfo(args = [compiler]), - compiler_type = ctx.attrs.compiler_type, + compiler = _run_info(cxx_tools_info.compiler), + compiler_type = cxx_tools_info.compiler_type, ), asm_compiler_info = CCompilerInfo( - compiler = RunInfo(args = [asm_compiler]), - compiler_type = asm_compiler_type, + compiler = _run_info(cxx_tools_info.asm_compiler), + compiler_type = cxx_tools_info.asm_compiler_type, ), cvtres_compiler_info = CvtresCompilerInfo( - compiler = RunInfo(args = [cvtres_compiler]), + compiler = _run_info(cxx_tools_info.cvtres_compiler), preprocessor_flags = [], compiler_flags = ctx.attrs.cvtres_flags, - compiler_type = ctx.attrs.compiler_type, + compiler_type = cxx_tools_info.compiler_type, ), rc_compiler_info = RcCompilerInfo( - compiler = RunInfo(args = [rc_compiler]), + compiler = _run_info(cxx_tools_info.rc_compiler), preprocessor_flags = [], compiler_flags = ctx.attrs.rc_flags, - compiler_type = ctx.attrs.compiler_type, + compiler_type = cxx_tools_info.compiler_type, ), header_mode = HeaderMode("symlink_tree_only"), cpp_dep_tracking_mode = ctx.attrs.cpp_dep_tracking_mode, pic_behavior = pic_behavior, llvm_link = llvm_link, ), - CxxPlatformInfo(name = "x86_64"), + CxxPlatformInfo(name = target_name), ] -def _windows_linker_wrapper(ctx: AnalysisContext, linker: cmd_args) -> cmd_args: - # Linkers pretty much all support @file.txt argument syntax to insert - # arguments from the given text file, usually formatted one argument per - # line. - # - # - GNU ld: https://gcc.gnu.org/onlinedocs/gcc/Overall-Options.html - # - lld is command line compatible with GNU ld - # - MSVC link.exe: https://learn.microsoft.com/en-us/cpp/build/reference/linking?view=msvc-170#link-command-files - # - # However, there is inconsistency in whether they support nesting of @file - # arguments inside of another @file. - # - # We wrap the linker to flatten @file arguments down to 1 level of nesting. 
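In plain Python, the flattening that comment describes amounts to the sketch below (assumed logic; the real wrapper is a separate prelude tool, and argfiles are taken to hold one argument per line):

```python
def flatten_argfiles(args, read_file):
    """Expand nested @file references into a single flat argument list."""
    out = []
    for arg in args:
        if arg.startswith("@"):
            nested = read_file(arg[1:]).splitlines()
            out.extend(flatten_argfiles(nested, read_file))
        else:
            out.append(arg)
    return out

files = {"a.rsp": "-lfoo\n@b.rsp", "b.rsp": "-lbar"}
assert flatten_argfiles(["@a.rsp"], files.__getitem__) == ["-lfoo", "-lbar"]
```

The wrapper can then write this flat list to one response file and hand the linker a single top-level @file, which every linker listed above accepts.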
- return cmd_script( - ctx = ctx, - name = "windows_linker", - cmd = cmd_args( - ctx.attrs.linker_wrapper[RunInfo], - linker, - ), - os = ScriptOs("windows"), - ) +def _run_info(args): + return None if args == None else RunInfo(args = [args]) system_cxx_toolchain = rule( impl = _system_cxx_toolchain_impl, attrs = { "c_flags": attrs.list(attrs.string(), default = []), - "compiler": attrs.string(default = "cl.exe" if host_info().os.is_windows else "clang"), - "compiler_type": attrs.string(default = "windows" if host_info().os.is_windows else "clang"), # one of CxxToolProviderType + "compiler": attrs.option(attrs.string(), default = None), + "compiler_type": attrs.option(attrs.string(), default = None), # one of CxxToolProviderType "cpp_dep_tracking_mode": attrs.string(default = "makefile"), - "cvtres_compiler": attrs.string(default = "cvtres.exe"), + "cvtres_compiler": attrs.option(attrs.string(), default = None), "cvtres_flags": attrs.list(attrs.string(), default = []), - "cxx_compiler": attrs.string(default = "cl.exe" if host_info().os.is_windows else "clang++"), + "cxx_compiler": attrs.option(attrs.string(), default = None), "cxx_flags": attrs.list(attrs.string(), default = []), "link_flags": attrs.list(attrs.string(), default = []), "link_ordering": attrs.option(attrs.enum(LinkOrdering.values()), default = None), "link_style": attrs.string(default = "shared"), - "linker": attrs.string(default = "link.exe" if host_info().os.is_windows else "clang++"), - "linker_wrapper": attrs.default_only(attrs.exec_dep(providers = [RunInfo], default = "prelude//cxx/tools:linker_wrapper")), - "make_comp_db": attrs.default_only(attrs.exec_dep(providers = [RunInfo], default = "prelude//cxx/tools:make_comp_db")), - "msvc_tools": attrs.default_only(attrs.exec_dep(providers = [VisualStudio], default = "prelude//toolchains/msvc:msvc_tools")), + "linker": attrs.option(attrs.string(), default = None), + "post_link_flags": attrs.list(attrs.string(), default = []), + "rc_compiler": attrs.option(attrs.string(), default = None), + "rc_flags": attrs.list(attrs.string(), default = []), + "_cxx_tools_info": attrs.exec_dep(providers = [CxxToolsInfo], default = "prelude//toolchains/msvc:msvc_tools" if host_info().os.is_windows else "prelude//toolchains/cxx/clang:path_clang_tools"), + "_internal_tools": attrs.default_only(attrs.exec_dep(providers = [CxxInternalTools], default = "prelude//cxx/tools:internal_tools")), + "_target_os_type": buck.target_os_type_arg(), + }, + is_toolchain_rule = True, +) + +cxx_tools_info_toolchain = rule( + impl = _cxx_tools_info_toolchain_impl, + attrs = { + "c_flags": attrs.list(attrs.string(), default = []), + "cpp_dep_tracking_mode": attrs.string(default = "makefile"), + "cvtres_flags": attrs.list(attrs.string(), default = []), + "cxx_flags": attrs.list(attrs.string(), default = []), + "cxx_tools_info": attrs.exec_dep(providers = [CxxToolsInfo], default = select({ + "DEFAULT": "prelude//toolchains/cxx/clang:path_clang_tools", + "config//os:windows": "prelude//toolchains/msvc:msvc_tools", + })), + "link_flags": attrs.list(attrs.string(), default = []), + "link_ordering": attrs.option(attrs.enum(LinkOrdering.values()), default = None), + "link_style": attrs.enum( + LinkStyle.values(), + default = "shared", + doc = """ + The default value of the `link_style` attribute for rules that use this toolchain. 
+ """, + ), "post_link_flags": attrs.list(attrs.string(), default = []), - "rc_compiler": attrs.string(default = "rc.exe"), "rc_flags": attrs.list(attrs.string(), default = []), + "_internal_tools": attrs.default_only(attrs.exec_dep(providers = [CxxInternalTools], default = "prelude//cxx/tools:internal_tools")), + "_target_os_type": buck.target_os_type_arg(), }, is_toolchain_rule = True, ) diff --git a/prelude/toolchains/cxx/clang/BUCK b/prelude/toolchains/cxx/clang/BUCK new file mode 100644 index 00000000000..9a1337a444e --- /dev/null +++ b/prelude/toolchains/cxx/clang/BUCK @@ -0,0 +1,11 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") +load(":tools.bzl", "path_clang_tools") + +oncall("build_infra") + +source_listing() + +path_clang_tools( + name = "path_clang_tools", + visibility = ["PUBLIC"], +) diff --git a/prelude/toolchains/cxx/clang/tools.bzl b/prelude/toolchains/cxx/clang/tools.bzl new file mode 100644 index 00000000000..05f4a6a1b0d --- /dev/null +++ b/prelude/toolchains/cxx/clang/tools.bzl @@ -0,0 +1,32 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load("@prelude//cxx:cxx_toolchain_types.bzl", "LinkerType") +load("@prelude//toolchains:cxx.bzl", "CxxToolsInfo") + +def _path_clang_tools_impl(_ctx) -> list[Provider]: + return [ + DefaultInfo(), + CxxToolsInfo( + compiler = "clang", + compiler_type = "clang", + cxx_compiler = "clang++", + asm_compiler = "clang", + asm_compiler_type = "clang", + rc_compiler = None, + cvtres_compiler = None, + archiver = "ar", + archiver_type = "gnu", + linker = "clang++", + linker_type = LinkerType("gnu"), + ), + ] + +path_clang_tools = rule( + impl = _path_clang_tools_impl, + attrs = {}, +) diff --git a/prelude/toolchains/cxx/zig/BUCK b/prelude/toolchains/cxx/zig/BUCK deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/prelude/toolchains/cxx/zig/defs.bzl b/prelude/toolchains/cxx/zig/defs.bzl index 4b8a7ffd8ae..a8a98ac1aa4 100644 --- a/prelude/toolchains/cxx/zig/defs.bzl +++ b/prelude/toolchains/cxx/zig/defs.bzl @@ -32,7 +32,7 @@ the toolchain like so: `toolchains//BUILD` ```bzl -load("@prelude//toolchains/cxx:zig.bzl", "download_zig_distribution", "cxx_zig_toolchain") +load("@prelude//toolchains/cxx/zig:defs.bzl", "download_zig_distribution", "cxx_zig_toolchain") download_zig_distribution( name = "zig", @@ -50,7 +50,7 @@ To define toolchains for multiple platforms and configure cross-compilation you can configure the toolchain like so: ```bzl -load("@prelude//toolchains/cxx:zig.bzl", "download_zig_distribution", "cxx_zig_toolchain") +load("@prelude//toolchains/cxx/zig:defs.bzl", "download_zig_distribution", "cxx_zig_toolchain") download_zig_distribution( name = "zig-x86_64-linux", @@ -100,6 +100,7 @@ load( "BinaryUtilitiesInfo", "CCompilerInfo", "CxxCompilerInfo", + "CxxInternalTools", "LinkerInfo", "ShlibInterfacesMode", "StripFlagsInfo", @@ -127,8 +128,6 @@ load( "releases", ) -DEFAULT_MAKE_COMP_DB = "prelude//cxx/tools:make_comp_db" - ZigReleaseInfo = provider( # @unsorted-dict-items fields = { @@ -172,11 +171,18 @@ def _zig_distribution_impl(ctx: AnalysisContext) -> list[Provider]: dst = ctx.actions.declare_output("zig") path_tpl = "{}/" + ctx.attrs.prefix + "/zig" + ctx.attrs.suffix src = cmd_args(ctx.attrs.dist[DefaultInfo].default_outputs[0], 
format = path_tpl) - ctx.actions.run(["ln", "-sf", cmd_args(src).relative_to(dst, parent = 1), dst.as_output()], category = "cp_compiler") + ctx.actions.run( + ["ln", "-sf", cmd_args(src, relative_to = (dst, 1)), dst.as_output()], + category = "cp_compiler", + ) - compiler = cmd_args([dst]) - compiler.hidden(ctx.attrs.dist[DefaultInfo].default_outputs) - compiler.hidden(ctx.attrs.dist[DefaultInfo].other_outputs) + compiler = cmd_args( + [dst], + hidden = [ + ctx.attrs.dist[DefaultInfo].default_outputs, + ctx.attrs.dist[DefaultInfo].other_outputs, + ], + ) return [ ctx.attrs.dist[DefaultInfo], @@ -222,13 +228,15 @@ def _http_archive_impl(ctx: AnalysisContext) -> list[Provider]: [ cmd_args(output, format = "mkdir -p {}"), cmd_args(output, format = "cd {}"), - cmd_args(flags, archive, delimiter = " ").relative_to(output), + cmd_args(flags, archive, delimiter = " ", relative_to = output), ], is_executable = True, allow_args = True, ) - ctx.actions.run(cmd_args(["/bin/sh", script]) - .hidden([archive, output.as_output()]), category = "http_archive") + ctx.actions.run( + cmd_args(["/bin/sh", script], hidden = [archive, output.as_output()]), + category = "http_archive", + ) return [DefaultInfo(default_output = output)] @@ -339,6 +347,7 @@ def _cxx_zig_toolchain_impl(ctx: AnalysisContext) -> list[Provider]: os = ScriptOs("windows" if dist.os == "windows" else "unix"), ) return [ctx.attrs.distribution[DefaultInfo]] + cxx_toolchain_infos( + internal_tools = ctx.attrs._cxx_internal_tools[CxxInternalTools], platform_name = dist.arch, c_compiler_info = CCompilerInfo( compiler = RunInfo(args = cmd_args(zig_cc)), @@ -376,7 +385,6 @@ def _cxx_zig_toolchain_impl(ctx: AnalysisContext) -> list[Provider]: #lto_mode = None, # TODO support LTO object_file_extension = "o", #mk_shlib_intf = None, # not needed if shlib_interfaces = "disabled" - produce_interface_from_stub_shared_library = True, shlib_interfaces = ShlibInterfacesMode("disabled"), shared_dep_runtime_ld_flags = ctx.attrs.shared_dep_runtime_ld_flags, shared_library_name_default_prefix = "lib", @@ -408,7 +416,6 @@ def _cxx_zig_toolchain_impl(ctx: AnalysisContext) -> list[Provider]: #as_compiler_info = None, #hip_compiler_info = None, #cuda_compiler_info = None, - mk_comp_db = ctx.attrs.make_comp_db, #mk_hmap = None, #use_distributed_thinlto = False, #use_dep_files = False, # requires dep_files_processor @@ -430,9 +437,14 @@ cxx_zig_toolchain = rule( "cxx_compiler_flags": attrs.list(attrs.arg(), default = []), "cxx_preprocessor_flags": attrs.list(attrs.arg(), default = []), "distribution": attrs.exec_dep(providers = [RunInfo, ZigDistributionInfo]), - "link_style": attrs.enum(LinkStyle.values(), default = "static"), + "link_style": attrs.enum( + LinkStyle.values(), + default = "static", + doc = """ + The default value of the `link_style` attribute for rules that use this toolchain. 
+ """, + ), "linker_flags": attrs.list(attrs.arg(), default = []), - "make_comp_db": attrs.dep(providers = [RunInfo], default = DEFAULT_MAKE_COMP_DB), "shared_dep_runtime_ld_flags": attrs.list(attrs.arg(), default = []), "shared_library_interface_flags": attrs.list(attrs.string(), default = []), "static_dep_runtime_ld_flags": attrs.list(attrs.arg(), default = []), @@ -441,6 +453,7 @@ cxx_zig_toolchain = rule( "strip_debug_flags": attrs.option(attrs.list(attrs.arg()), default = None), "strip_non_global_flags": attrs.option(attrs.list(attrs.arg()), default = None), "target": attrs.option(attrs.string(), default = None), + "_cxx_internal_tools": attrs.default_only(attrs.dep(providers = [CxxInternalTools], default = "prelude//cxx/tools:internal_tools")), }, is_toolchain_rule = True, ) diff --git a/prelude/toolchains/demo.bzl b/prelude/toolchains/demo.bzl index 17ee3f5e7a4..7448804a32c 100644 --- a/prelude/toolchains/demo.bzl +++ b/prelude/toolchains/demo.bzl @@ -7,7 +7,7 @@ load("@prelude//toolchains:cxx.bzl", "system_cxx_toolchain") load("@prelude//toolchains:genrule.bzl", "system_genrule_toolchain") -load("@prelude//toolchains:go.bzl", "system_go_toolchain") +load("@prelude//toolchains:go.bzl", "system_go_bootstrap_toolchain", "system_go_toolchain") load("@prelude//toolchains:haskell.bzl", "system_haskell_toolchain") load("@prelude//toolchains:ocaml.bzl", "system_ocaml_toolchain") load("@prelude//toolchains:python.bzl", "system_python_bootstrap_toolchain", "system_python_toolchain") @@ -34,6 +34,11 @@ def system_demo_toolchains(): visibility = ["PUBLIC"], ) + system_go_bootstrap_toolchain( + name = "go_bootstrap", + visibility = ["PUBLIC"], + ) + system_haskell_toolchain( name = "haskell", visibility = ["PUBLIC"], diff --git a/prelude/toolchains/go.bzl b/prelude/toolchains/go.bzl index 176203a632e..577463bcd57 100644 --- a/prelude/toolchains/go.bzl +++ b/prelude/toolchains/go.bzl @@ -6,9 +6,10 @@ # of this source tree. load("@prelude//go:toolchain.bzl", "GoToolchainInfo") +load("@prelude//go_bootstrap:go_bootstrap.bzl", "GoBootstrapToolchainInfo") load("@prelude//utils:cmd_script.bzl", "ScriptOs", "cmd_script") -def _system_go_toolchain_impl(ctx): +def go_platform() -> (str, str): arch = host_info().arch if arch.is_aarch64: go_arch = "arm64" @@ -27,8 +28,42 @@ def _system_go_toolchain_impl(ctx): else: fail("Unsupported go os: {}".format(os)) - script_os = ScriptOs("windows" if os.is_windows else "unix") - go = "go.exe" if os.is_windows else "go" + return go_os, go_arch + +def _system_go_bootstrap_toolchain_impl(ctx): + go_os, go_arch = go_platform() + + script_os = ScriptOs("windows" if go_os == "windows" else "unix") + go = "go.exe" if go_os == "windows" else "go" + + return [ + DefaultInfo(), + GoBootstrapToolchainInfo( + env_go_arch = go_arch, + env_go_os = go_os, + go = RunInfo(cmd_script(ctx, "go", cmd_args(go), script_os)), + go_wrapper = ctx.attrs.go_wrapper[RunInfo], + ), + ] + +system_go_bootstrap_toolchain = rule( + impl = _system_go_bootstrap_toolchain_impl, + doc = """Example system go toolchain rules (WIP). 
Usage: + system_go_bootstrap_toolchain( + name = "go_bootstrap", + visibility = ["PUBLIC"], + )""", + attrs = { + "go_wrapper": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//go/tools:go_wrapper_py")), + }, + is_toolchain_rule = True, +) + +def _system_go_toolchain_impl(ctx): + go_os, go_arch = go_platform() + + script_os = ScriptOs("windows" if go_os == "windows" else "unix") + go = "go.exe" if go_os == "windows" else "go" return [ DefaultInfo(), @@ -36,16 +71,13 @@ def _system_go_toolchain_impl(ctx): assembler = RunInfo(cmd_script(ctx, "asm", cmd_args(go, "tool", "asm"), script_os)), cgo = RunInfo(cmd_script(ctx, "cgo", cmd_args(go, "tool", "cgo"), script_os)), cgo_wrapper = ctx.attrs.cgo_wrapper[RunInfo], - compile_wrapper = ctx.attrs.compile_wrapper[RunInfo], concat_files = ctx.attrs.concat_files[RunInfo], compiler = RunInfo(cmd_script(ctx, "compile", cmd_args(go, "tool", "compile"), script_os)), cover = RunInfo(cmd_script(ctx, "cover", cmd_args(go, "tool", "cover"), script_os)), - cover_srcs = ctx.attrs.cover_srcs[RunInfo], - cxx_toolchain_for_linking = None, + default_cgo_enabled = False, env_go_arch = go_arch, env_go_os = go_os, external_linker_flags = [], - filter_srcs = ctx.attrs.filter_srcs[RunInfo], gen_stdlib_importcfg = ctx.attrs.gen_stdlib_importcfg[RunInfo], go = RunInfo(cmd_script(ctx, "go", cmd_args(go), script_os)), go_wrapper = ctx.attrs.go_wrapper[RunInfo], @@ -67,12 +99,9 @@ system_go_toolchain = rule( )""", attrs = { "cgo_wrapper": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//go/tools:cgo_wrapper")), - "compile_wrapper": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//go/tools:compile_wrapper")), - "concat_files": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//go/tools:concat_files")), - "cover_srcs": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//go/tools:cover_srcs")), - "filter_srcs": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//go/tools:filter_srcs")), + "concat_files": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//go_bootstrap/tools:go_concat_files")), "gen_stdlib_importcfg": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//go/tools:gen_stdlib_importcfg")), - "go_wrapper": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//go/tools:go_wrapper")), + "go_wrapper": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//go_bootstrap/tools:go_go_wrapper")), }, is_toolchain_rule = True, ) diff --git a/prelude/toolchains/msvc/BUCK.v2 b/prelude/toolchains/msvc/BUCK.v2 index ed74363b496..42d4db5370d 100644 --- a/prelude/toolchains/msvc/BUCK.v2 +++ b/prelude/toolchains/msvc/BUCK.v2 @@ -1,18 +1,24 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") load(":tools.bzl", "find_msvc_tools") +oncall("build_infra") + +source_listing() + python_bootstrap_binary( name = "vswhere", main = "vswhere.py", - visibility = [], + visibility = ["PUBLIC"], ) python_bootstrap_binary( name = "run_msvc_tool", main = "run_msvc_tool.py", - visibility = [], + visibility = ["PUBLIC"], ) find_msvc_tools( name = "msvc_tools", + target_compatible_with = ["config//os:windows"], visibility = ["PUBLIC"], ) diff --git a/prelude/toolchains/msvc/tools.bzl b/prelude/toolchains/msvc/tools.bzl index 199065fb944..93ffa771d58 100644 --- a/prelude/toolchains/msvc/tools.bzl +++ b/prelude/toolchains/msvc/tools.bzl @@ -5,26 +5,10 @@ # License, Version 2.0 found in the 
LICENSE-APACHE file in the root directory # of this source tree. +load("@prelude//cxx:cxx_toolchain_types.bzl", "LinkerType") +load("@prelude//toolchains:cxx.bzl", "CxxToolsInfo") load("@prelude//utils:cmd_script.bzl", "ScriptOs", "cmd_script") -VisualStudio = provider( - # @unsorted-dict-items - fields = { - # cl.exe - "cl_exe": provider_field(typing.Any, default = None), - # cvtres.exe - "cvtres_exe": provider_field(typing.Any, default = None), - # lib.exe - "lib_exe": provider_field(typing.Any, default = None), - # ml64.exe - "ml64_exe": provider_field(typing.Any, default = None), - # link.exe - "link_exe": provider_field(typing.Any, default = None), - # rc.exe - "rc_exe": provider_field(typing.Any, default = None), - }, -) - def _find_msvc_tools_impl(ctx: AnalysisContext) -> list[Provider]: cl_exe_json = ctx.actions.declare_output("cl.exe.json") cvtres_exe_json = ctx.actions.declare_output("cvtres.exe.json") @@ -50,42 +34,53 @@ def _find_msvc_tools_impl(ctx: AnalysisContext) -> list[Provider]: ) run_msvc_tool = ctx.attrs.run_msvc_tool[RunInfo] - cl_exe_script = cmd_script( - ctx = ctx, - name = "cl", - cmd = cmd_args(run_msvc_tool, cl_exe_json), - os = ScriptOs("windows"), - ) - cvtres_exe_script = cmd_script( - ctx = ctx, - name = "cvtres", - cmd = cmd_args(run_msvc_tool, cvtres_exe_json), - os = ScriptOs("windows"), - ) - lib_exe_script = cmd_script( - ctx = ctx, - name = "lib", - cmd = cmd_args(run_msvc_tool, lib_exe_json), - os = ScriptOs("windows"), - ) - ml64_exe_script = cmd_script( - ctx = ctx, - name = "ml64", - cmd = cmd_args(run_msvc_tool, ml64_exe_json), - os = ScriptOs("windows"), - ) - link_exe_script = cmd_script( - ctx = ctx, - name = "link", - cmd = cmd_args(run_msvc_tool, link_exe_json), - os = ScriptOs("windows"), - ) - rc_exe_script = cmd_script( - ctx = ctx, - name = "rc", - cmd = cmd_args(run_msvc_tool, rc_exe_json), - os = ScriptOs("windows"), - ) + if ctx.attrs.use_path_compilers: + cl_exe_script = "cl.exe" + ml64_exe_script = "ml64.exe" + rc_exe_script = "rc.exe" + cvtres_exe_script = "cvtres.exe" + else: + cl_exe_script = cmd_script( + ctx = ctx, + name = "cl", + cmd = cmd_args(run_msvc_tool, cl_exe_json), + os = ScriptOs("windows"), + ) + cvtres_exe_script = cmd_script( + ctx = ctx, + name = "cvtres", + cmd = cmd_args(run_msvc_tool, cvtres_exe_json), + os = ScriptOs("windows"), + ) + ml64_exe_script = cmd_script( + ctx = ctx, + name = "ml64", + cmd = cmd_args(run_msvc_tool, ml64_exe_json), + os = ScriptOs("windows"), + ) + rc_exe_script = cmd_script( + ctx = ctx, + name = "rc", + cmd = cmd_args(run_msvc_tool, rc_exe_json), + os = ScriptOs("windows"), + ) + + if ctx.attrs.use_path_linkers: + lib_exe_script = "lib.exe" + link_exe_script = "link.exe" + else: + lib_exe_script = cmd_script( + ctx = ctx, + name = "lib", + cmd = cmd_args(run_msvc_tool, lib_exe_json), + os = ScriptOs("windows"), + ) + link_exe_script = cmd_script( + ctx = ctx, + name = "link", + cmd = cmd_args(run_msvc_tool, link_exe_json), + os = ScriptOs("windows"), + ) return [ # Supports `buck2 run prelude//toolchains/msvc:msvc_tools[cl.exe]` @@ -128,20 +123,51 @@ def _find_msvc_tools_impl(ctx: AnalysisContext) -> list[Provider]: }), ], }), - VisualStudio( - cl_exe = cl_exe_script, - cvtres_exe = cvtres_exe_script, - lib_exe = lib_exe_script, - ml64_exe = ml64_exe_script, - link_exe = link_exe_script, - rc_exe = rc_exe_script, + CxxToolsInfo( + compiler = cl_exe_script, + compiler_type = "windows", + cxx_compiler = cl_exe_script, + asm_compiler = ml64_exe_script, + asm_compiler_type = 
"windows_ml64", + rc_compiler = rc_exe_script, + cvtres_compiler = cvtres_exe_script, + archiver = lib_exe_script, + archiver_type = "windows", + linker = _windows_linker_wrapper(ctx, link_exe_script), + linker_type = LinkerType("windows"), ), ] +def _windows_linker_wrapper(ctx: AnalysisContext, linker: [cmd_args, str]) -> cmd_args: + # Linkers pretty much all support @file.txt argument syntax to insert + # arguments from the given text file, usually formatted one argument per + # line. + # + # - GNU ld: https://gcc.gnu.org/onlinedocs/gcc/Overall-Options.html + # - lld is command line compatible with GNU ld + # - MSVC link.exe: https://learn.microsoft.com/en-us/cpp/build/reference/linking?view=msvc-170#link-command-files + # + # However, there is inconsistency in whether they support nesting of @file + # arguments inside of another @file. + # + # We wrap the linker to flatten @file arguments down to 1 level of nesting. + return cmd_script( + ctx = ctx, + name = "windows_linker", + cmd = cmd_args( + ctx.attrs.linker_wrapper[RunInfo], + linker, + ), + os = ScriptOs("windows"), + ) + find_msvc_tools = rule( impl = _find_msvc_tools_impl, attrs = { + "linker_wrapper": attrs.default_only(attrs.exec_dep(providers = [RunInfo], default = "prelude//cxx/tools:linker_wrapper")), "run_msvc_tool": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//toolchains/msvc:run_msvc_tool")), + "use_path_compilers": attrs.bool(default = False), + "use_path_linkers": attrs.bool(default = False), "vswhere": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//toolchains/msvc:vswhere")), }, ) diff --git a/prelude/toolchains/msvc/vswhere.py b/prelude/toolchains/msvc/vswhere.py index f007d74150f..d56dfa6665c 100644 --- a/prelude/toolchains/msvc/vswhere.py +++ b/prelude/toolchains/msvc/vswhere.py @@ -75,6 +75,7 @@ def find_with_vswhere_exe(): "Microsoft.VisualStudio.Component.VC.Tools.x86.x64", "-format", "json", + "-utf8", "-nologo", ], encoding="utf-8", @@ -189,7 +190,7 @@ def get_sdk10_dir(): windows_sdk_dir = os.environ.get("WindowsSdkDir") windows_sdk_version = os.environ.get("WindowsSDKVersion") if windows_sdk_dir is not None and windows_sdk_version is not None: - return windows_sdk_dir, windows_sdk_version.removesuffix("\\") + return Path(windows_sdk_dir), windows_sdk_version.removesuffix("\\") registry = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) key_name = "SOFTWARE\\Microsoft\\Microsoft SDKs\\Windows\\v10.0" @@ -309,7 +310,7 @@ def main(): cl_exe, cvtres_exe, lib_exe, ml64_exe, link_exe = ( find_in_path(exe) for exe in VC_EXE_NAMES ) - rc_exe = find_in_path("rc.exe", optional=True) + rc_exe = find_in_path("rc.exe", is_optional=True) elif "EWDKDIR" in os.environ: cl_exe, cvtres_exe, lib_exe, ml64_exe, link_exe, rc_exe = find_with_ewdk( Path(os.environ["EWDKDIR"]) diff --git a/prelude/toolchains/python.bzl b/prelude/toolchains/python.bzl index 01a36f71f39..16400385b0d 100644 --- a/prelude/toolchains/python.bzl +++ b/prelude/toolchains/python.bzl @@ -57,7 +57,10 @@ def _system_python_toolchain_impl(ctx): return [ DefaultInfo(), PythonToolchainInfo( + binary_linker_flags = ctx.attrs.binary_linker_flags, + linker_flags = ctx.attrs.linker_flags, fail_with_message = ctx.attrs.fail_with_message[RunInfo], + generate_static_extension_info = ctx.attrs.generate_static_extension_info, make_source_db = ctx.attrs.make_source_db[RunInfo], make_source_db_no_deps = ctx.attrs.make_source_db_no_deps[RunInfo], host_interpreter = RunInfo(args = [ctx.attrs.interpreter]), @@ -76,8 
+79,11 @@ def _system_python_toolchain_impl(ctx): system_python_toolchain = rule( impl = _system_python_toolchain_impl, attrs = { + "binary_linker_flags": attrs.default_only(attrs.list(attrs.arg(), default = [])), "fail_with_message": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//python/tools:fail_with_message")), + "generate_static_extension_info": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//python/tools:generate_static_extension_info")), "interpreter": attrs.string(default = _INTERPRETER), + "linker_flags": attrs.default_only(attrs.list(attrs.arg(), default = [])), "make_py_package_inplace": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//python/tools:make_py_package_inplace")), "make_py_package_modules": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//python/tools:make_py_package_modules")), "make_source_db": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//python/tools:make_source_db")), diff --git a/prelude/toolchains/remote_test_execution.bzl b/prelude/toolchains/remote_test_execution.bzl index 5f2f382f159..39479f0623c 100644 --- a/prelude/toolchains/remote_test_execution.bzl +++ b/prelude/toolchains/remote_test_execution.bzl @@ -10,10 +10,17 @@ load("@prelude//tests:remote_test_execution_toolchain.bzl", "RemoteTestExecution load("@prelude//utils:utils.bzl", "map_val") def _impl(ctx: AnalysisContext) -> list[Provider]: + default_profile = map_val(ctx.attrs.profiles.get, ctx.attrs.default_profile) + if ctx.attrs.default_run_as_bundle != None: + default_run_as_bundle = ctx.attrs.default_run_as_bundle + else: + default_run_as_bundle = bool(default_profile) + return [ DefaultInfo(), RemoteTestExecutionToolchainInfo( - default_profile = map_val(ctx.attrs.profiles.get, ctx.attrs.default_profile), + default_profile = default_profile, + default_run_as_bundle = default_run_as_bundle, profiles = ctx.attrs.profiles, ), ] @@ -23,6 +30,7 @@ remote_test_execution_toolchain = rule( is_toolchain_rule = True, attrs = { "default_profile": attrs.option(attrs.string(), default = None), + "default_run_as_bundle": attrs.option(attrs.bool(), default = None), "profiles": attrs.dict( key = attrs.string(), value = attrs.option(re_test_common.opts_for_tests_arg()), diff --git a/prelude/toolchains/rust.bzl b/prelude/toolchains/rust.bzl index 8a5135963f8..3860f821313 100644 --- a/prelude/toolchains/rust.bzl +++ b/prelude/toolchains/rust.bzl @@ -46,13 +46,11 @@ def _system_rust_toolchain_impl(ctx): panic_runtime = PanicRuntime("unwind"), deny_lints = ctx.attrs.deny_lints, doctests = ctx.attrs.doctests, - extern_html_root_url_prefix = ctx.attrs.extern_html_root_url_prefix, failure_filter_action = ctx.attrs.failure_filter_action[RunInfo], - pipelined = ctx.attrs.pipelined, + nightly_features = ctx.attrs.nightly_features, report_unused_deps = ctx.attrs.report_unused_deps, rustc_action = ctx.attrs.rustc_action[RunInfo], rustc_binary_flags = ctx.attrs.rustc_binary_flags, - rustc_check_flags = ctx.attrs.rustc_check_flags, rustc_flags = ctx.attrs.rustc_flags, rustc_target_triple = ctx.attrs.rustc_target_triple, rustc_test_flags = ctx.attrs.rustc_test_flags, @@ -73,11 +71,9 @@ system_rust_toolchain = rule( "default_edition": attrs.option(attrs.string(), default = None), "deny_lints": attrs.list(attrs.string(), default = []), "doctests": attrs.bool(default = False), - "extern_html_root_url_prefix": attrs.option(attrs.string(), default = None), - "pipelined": attrs.bool(default = True), + 
"nightly_features": attrs.bool(default = False), "report_unused_deps": attrs.bool(default = False), "rustc_binary_flags": attrs.list(attrs.string(), default = []), - "rustc_check_flags": attrs.list(attrs.string(), default = []), "rustc_flags": attrs.list(attrs.string(), default = []), "rustc_target_triple": attrs.string(default = _DEFAULT_TRIPLE), "rustc_test_flags": attrs.list(attrs.string(), default = []), diff --git a/prelude/transitions/constraint_overrides.bzl b/prelude/transitions/constraint_overrides.bzl index e8108b804bb..f336604404f 100644 --- a/prelude/transitions/constraint_overrides.bzl +++ b/prelude/transitions/constraint_overrides.bzl @@ -20,6 +20,22 @@ _CONSTRAINTS = [ def _constr_eq(a, b): return a.label == b.label +# It's possible that we multiple constraints for the same setting, so drop all +# but the last one. +def _dedupe(constraints): + deduped = [] + + # Walk the constraints in reverse, so that the last one trakes precedence. + settings = {} + for constraint in reversed(constraints): + if constraint.setting.label in settings: + # we've already seen this setting + continue + settings[constraint.setting.label] = None + deduped.append(constraint) + + return reversed(deduped) + def _constraint_overrides_transition_impl( platform: PlatformInfo, refs: struct, @@ -30,6 +46,9 @@ def _constraint_overrides_transition_impl( for constraint in attrs.constraint_overrides ] + # Filter out redundant constraints. + new_constraints = _dedupe(new_constraints) + # Filter out new constraints which are already a part of the platform. new_constraints = [ constraint diff --git a/prelude/transitions/utils.bzl b/prelude/transitions/utils.bzl new file mode 100644 index 00000000000..5899fd1ad8a --- /dev/null +++ b/prelude/transitions/utils.bzl @@ -0,0 +1,21 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +def filtered_platform_constraints(platform: PlatformInfo, constraint_settings_labels_to_remove: list[TargetLabel]) -> dict[TargetLabel, ConstraintValueInfo]: + return { + constraint_setting_label: constraint_setting_value + for (constraint_setting_label, constraint_setting_value) in platform.configuration.constraints.items() + if constraint_setting_label not in constraint_settings_labels_to_remove + } + +def get_constraint_value(platform: PlatformInfo, constraint: ConstraintSettingInfo) -> [None, ConstraintValueInfo]: + return platform.configuration.constraints.get(constraint.label) + +utils = { + "filtered_platform_constraints": filtered_platform_constraints, + "get_constraint_value": get_constraint_value, +} diff --git a/prelude/unix/providers.bzl b/prelude/unix/providers.bzl new file mode 100644 index 00000000000..5378cc2f218 --- /dev/null +++ b/prelude/unix/providers.bzl @@ -0,0 +1,60 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
+ +load("@prelude//:artifacts.bzl", "ArtifactExt") +load("@prelude//linking:shared_libraries.bzl", "SharedLibraries") +load("@prelude//python:manifest.bzl", "ManifestInfo") +load("@prelude//python:python.bzl", "PythonLibraryInfo") + +# Provider representing components that can be added to a "unix" env (e.g. +# binaries in `bin/`, native libs in `lib/`, and Python modules under +# `lib/python*/site-packages`). +UnixEnv = record( + label = field(Label), + # Third-party builds to install into the env (non-transitive). + third_party_builds = field(list[ArtifactExt], []), + # Python libraries to install (non-transitive). + python_libs = field(list[PythonLibraryInfo], []), + # Native libs to install (non-transitive). + native_libs = field(list[SharedLibraries], []), + # Binaries to install. + binaries = field(list[ManifestInfo], []), + # Raw paths to install. + paths = field(list[(str, ArtifactExt)], []), + patterns = field(list[(str, ArtifactExt, str)], []), +) + +UnixEnvTSet = transitive_set() + +UnixEnvInfo = provider( + fields = dict( + _tset = provider_field(UnixEnvTSet), + ), +) + +def create_unix_env_info( + actions: AnalysisActions, + env: UnixEnv | None = None, + children: list[UnixEnvInfo] = [], + deps: list[Dependency] = []) -> UnixEnvInfo: + all_children = [] + for child in children: + all_children.append(child._tset) + for dep in deps: + child = dep.get(UnixEnvInfo) + if child != None: + all_children.append(child._tset) + kwargs = {} + if env != None: + kwargs["value"] = env + kwargs["children"] = all_children + return UnixEnvInfo( + _tset = actions.tset( + UnixEnvTSet, + **kwargs + ), + ) diff --git a/prelude/user/all.bzl b/prelude/user/all.bzl index 2fd2dbb0958..d00eb145dea 100644 --- a/prelude/user/all.bzl +++ b/prelude/user/all.bzl @@ -5,14 +5,21 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
+load("@prelude//android/user:android_emulators.bzl", _android_emulators_spec = "registration_spec") +load("@prelude//apple:apple_resource_dedupe_alias.bzl", _apple_resource_dedupe_alias_spec = "registration_spec") +load("@prelude//apple:apple_static_archive.bzl", _apple_static_archive_spec = "registration_spec") +load("@prelude//apple/mockingbird:mockingbird_mock.bzl", _mockingbird_mock_spec = "registration_spec") +load("@prelude//apple/user:apple_ipa_package.bzl", _apple_ipa_package_spec = "registration_spec") +load("@prelude//apple/user:apple_macos_bundle.bzl", _apple_macos_bundle_spec = "registration_spec") load("@prelude//apple/user:apple_resource_bundle.bzl", _apple_resource_bundle_spec = "registration_spec") load("@prelude//apple/user:apple_selective_debugging.bzl", _apple_selective_debugging_spec = "registration_spec") load("@prelude//apple/user:apple_simulators.bzl", _apple_simulators_spec = "registration_spec") load("@prelude//apple/user:apple_toolchain_override.bzl", _apple_toolchain_override_spec = "registration_spec") load("@prelude//apple/user:apple_tools.bzl", _apple_tools_spec = "registration_spec") load("@prelude//apple/user:apple_watchos_bundle.bzl", _apple_watchos_bundle_spec = "registration_spec") +load("@prelude//apple/user:apple_xcframework.bzl", _apple_xcframework_spec = "registration_spec") load("@prelude//apple/user:resource_group_map.bzl", _resource_group_map_spec = "registration_spec") -load("@prelude//cxx/user:cxx_toolchain_override.bzl", _cxx_toolchain_override_inheriting_target_platform_spec = "cxx_toolchain_override_inheriting_target_platform_registration_spec", _cxx_toolchain_override_spec = "cxx_toolchain_override_registration_spec") +load("@prelude//cxx/user:cxx_toolchain_override.bzl", _cxx_toolchain_override_spec = "cxx_toolchain_override_registration_spec") load("@prelude//cxx/user:link_group_map.bzl", _link_group_map_spec = "registration_spec") load(":cxx_headers_bundle.bzl", _cxx_headers_bundle_spec = "registration_spec") load(":extract_archive.bzl", _extract_archive_spec = "registration_spec") @@ -20,18 +27,24 @@ load(":write_file.bzl", _write_file_spec = "registration_spec") _all_specs = [ _extract_archive_spec, + _android_emulators_spec, _apple_tools_spec, _apple_selective_debugging_spec, + _apple_static_archive_spec, _apple_resource_bundle_spec, + _apple_resource_dedupe_alias_spec, + _apple_xcframework_spec, + _apple_ipa_package_spec, _link_group_map_spec, _resource_group_map_spec, _apple_watchos_bundle_spec, + _apple_macos_bundle_spec, _apple_toolchain_override_spec, _cxx_headers_bundle_spec, _cxx_toolchain_override_spec, - _cxx_toolchain_override_inheriting_target_platform_spec, _apple_simulators_spec, _write_file_spec, + _mockingbird_mock_spec, ] rules = { diff --git a/prelude/user/extract_archive.bzl b/prelude/user/extract_archive.bzl index a7c76731e6c..a65107e05d0 100644 --- a/prelude/user/extract_archive.bzl +++ b/prelude/user/extract_archive.bzl @@ -20,13 +20,15 @@ def _impl(ctx: AnalysisContext) -> list[Provider]: [ cmd_args(output, format = "mkdir -p {}"), cmd_args(output, format = "cd {}"), - cmd_args(archive, format = "tar -xzf {}").relative_to(output), + cmd_args(archive, format = "tar -xzf {}", relative_to = output), ], is_executable = True, allow_args = True, ) - ctx.actions.run(cmd_args(["/bin/sh", script]) - .hidden([archive, output.as_output()]), category = "extract_archive") + ctx.actions.run( + cmd_args(["/bin/sh", script], hidden = [archive, output.as_output()]), + category = "extract_archive", + ) return 
[DefaultInfo(default_output = output)] diff --git a/prelude/user/rule_spec.bzl b/prelude/user/rule_spec.bzl index 426b57d3f7e..ad8b45113d2 100644 --- a/prelude/user/rule_spec.bzl +++ b/prelude/user/rule_spec.bzl @@ -9,7 +9,8 @@ RuleRegistrationSpec = record( name = field(str), impl = field(typing.Callable), attrs = field(dict[str, Attr]), - cfg = field([None, "transition"], None), + # TODO(nga): should be `transition | None`, but `transition` does not work as type. + cfg = field(typing.Any | None, None), is_toolchain_rule = field(bool, False), doc = field(str, ""), ) diff --git a/prelude/utils/argfile.bzl b/prelude/utils/argfile.bzl new file mode 100644 index 00000000000..6512a8ed95f --- /dev/null +++ b/prelude/utils/argfile.bzl @@ -0,0 +1,43 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +# Create an argument file. +# Return `cmd_args` which is a single string containing `@path/to/argfile`. +# The returned `cmd_args` contains the given files as hidden artifacts. +def at_argfile( + *, + # ctx.actions + actions, + # name of the argument file + name: str | Artifact, + # the arguments to write to the argument file + args, + # pass to `ctx.actions.write` + allow_args: bool = False) -> cmd_args: + if allow_args: + args_file, _ = actions.write(name, args, allow_args = True, with_inputs = True) + else: + args_file = actions.write(name, args, with_inputs = True) + return cmd_args(args_file, format = "@{}", hidden = args) + +# Write arguments to a file, and return the file path as `cmd_args` +# with args attached as hidden artifacts. +def argfile( + *, + # ctx.actions + actions, + # name of the argument file + name: str | Artifact, + # the arguments to write to the argument file + args, + # pass to `ctx.actions.write` + allow_args: bool = False) -> cmd_args: + if allow_args: + args_file, _ = actions.write(name, args, allow_args = True, with_inputs = True) + else: + args_file = actions.write(name, args, with_inputs = True) + return cmd_args(args_file, hidden = args) diff --git a/prelude/utils/buckconfig.bzl b/prelude/utils/buckconfig.bzl index bc03a75c49b..8d747bd09d4 100644 --- a/prelude/utils/buckconfig.bzl +++ b/prelude/utils/buckconfig.bzl @@ -188,4 +188,4 @@ def resolve_alias(alias): return target else: alias = target - fail("This should never happen - either the alias exists or it doesnt") + fail("This should never happen - either the alias exists or it doesn't") diff --git a/prelude/utils/build_target_pattern.bzl b/prelude/utils/build_target_pattern.bzl index 3d97932c5bd..5d9dd962978 100644 --- a/prelude/utils/build_target_pattern.bzl +++ b/prelude/utils/build_target_pattern.bzl @@ -5,9 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -load("@prelude//utils:expect.bzl", "expect") - -_ROOT_SYMBOL = "//" +ROOT_SYMBOL = "//" _TARGET_SYMBOL = ":" _RECURSIVE_SYMBOL = "..." _PATH_SYMBOL = "/" @@ -25,13 +23,30 @@ BuildTargetPattern = record( name = field([str, None], None), matches = field(typing.Callable), as_string = field(typing.Callable), + + # Exists purely for optimisation purposes.
+ # Matching a pattern inside a loop over many targets creates a huge amount of + # unnecessary string allocations that we can avoid + _path_with_path_symbol = field(str), ) -def parse_build_target_pattern(pattern: str) -> BuildTargetPattern: - expect(len(pattern) >= len(_ROOT_SYMBOL) + 1, "Invalid build target pattern, pattern too short: {}".format(pattern)) +BuildTargetPatternParseResult = record( + build_target_pattern = field([BuildTargetPattern, None], None), + error = field([str, None], default = None), +) - root_position = pattern.find(_ROOT_SYMBOL) - expect(root_position >= 0, "Invalid build target pattern, pattern should started with `{}` or a cell name followed by `{}`: ".format(_ROOT_SYMBOL, _ROOT_SYMBOL, pattern)) +def try_parse_build_target_pattern(pattern: str) -> BuildTargetPatternParseResult: + """ + This function tries to parse a build target pattern. If parsing fails, it returns the error message. + """ + if not (len(pattern) >= len(ROOT_SYMBOL) + 1): + err_msg = "Invalid build target pattern, pattern too short: {}".format(pattern) + return BuildTargetPatternParseResult(error = err_msg) + + root_position = pattern.find(ROOT_SYMBOL) + if not (root_position >= 0): + err_msg = "Invalid build target pattern, pattern should start with `{}` or a cell name followed by `{}`: {}".format(ROOT_SYMBOL, ROOT_SYMBOL, pattern) + return BuildTargetPatternParseResult(error = err_msg) cell = None if root_position > 0: @@ -44,7 +59,9 @@ def parse_build_target_pattern(pattern: str) -> BuildTargetPattern: elif pattern.endswith(_RECURSIVE_SYMBOL): kind = _BuildTargetPatternKind("recursive") end_of_path_position = len(pattern) - len(_RECURSIVE_SYMBOL) - 1 - expect(pattern[end_of_path_position] == _PATH_SYMBOL, "Invalid build target pattern, `{}` should be preceded by a `{}`: {}".format(_RECURSIVE_SYMBOL, _PATH_SYMBOL, pattern)) + if not (pattern[end_of_path_position] == _PATH_SYMBOL): + err_msg = "Invalid build target pattern, `{}` should be preceded by a `{}`: {}".format(_RECURSIVE_SYMBOL, _PATH_SYMBOL, pattern) + return BuildTargetPatternParseResult(error = err_msg) else: kind = _BuildTargetPatternKind("single") end_of_path_position = pattern.rfind(_TARGET_SYMBOL) @@ -55,19 +72,30 @@ def parse_build_target_pattern(pattern: str) -> BuildTargetPattern: start_of_package = pattern.rfind(_PATH_SYMBOL) name = pattern[start_of_package + len(_PATH_SYMBOL):] elif end_of_path_position < root_position: - fail("Invalid build target pattern, cell name should not contain `{}`: {}".format(_PATH_SYMBOL, pattern)) + err_msg = "Invalid build target pattern, cell name should not contain `{}`: {}".format(_PATH_SYMBOL, pattern) + return BuildTargetPatternParseResult(error = err_msg) else: name = pattern[end_of_path_position + len(_TARGET_SYMBOL):] - start_of_path_position = root_position + len(_ROOT_SYMBOL) + start_of_path_position = root_position + len(ROOT_SYMBOL) - expect(pattern[start_of_path_position] != _PATH_SYMBOL, "Invalid build target pattern, path cannot start with `{}`: {}".format(_PATH_SYMBOL, pattern)) + if not (pattern[start_of_path_position] != _PATH_SYMBOL): + err_msg = "Invalid build target pattern, path cannot start with `{}`: {}".format(_PATH_SYMBOL, pattern) + return BuildTargetPatternParseResult(error = err_msg) path = pattern[start_of_path_position:end_of_path_position] - expect(path.find(_ROOT_SYMBOL) < 0, "Invalid build target pattern, `{}` can only appear once: {}".format(_ROOT_SYMBOL, pattern)) - expect(path.find(_RECURSIVE_SYMBOL) < 0, "Invalid build target pattern, `{}` can only appear once:
{}".format(_RECURSIVE_SYMBOL, pattern)) - expect(path.find(_TARGET_SYMBOL) < 0, "Invalid build target pattern, `{}` can only appear once: {}".format(_TARGET_SYMBOL, pattern)) - expect(len(path) == 0 or path[-1:] != _PATH_SYMBOL, "Invalid build target pattern, path cannot end with `{}`: {}".format(_PATH_SYMBOL, pattern)) + if not (path.find(ROOT_SYMBOL) < 0): + err_msg = "Invalid build target pattern, `{}` can only appear once: {}".format(ROOT_SYMBOL, pattern) + return BuildTargetPatternParseResult(error = err_msg) + if not (path.find(_RECURSIVE_SYMBOL) < 0): + err_msg = "Invalid build target pattern, `{}` can only appear once: {}".format(_RECURSIVE_SYMBOL, pattern) + return BuildTargetPatternParseResult(error = err_msg) + if not (path.find(_TARGET_SYMBOL) < 0): + err_msg = "Invalid build target pattern, `{}` can only appear once: {}".format(_TARGET_SYMBOL, pattern) + return BuildTargetPatternParseResult(error = err_msg) + if not (len(path) == 0 or path[-1:] != _PATH_SYMBOL): + err_msg = "Invalid build target pattern, path cannot end with `{}`: {}".format(_PATH_SYMBOL, pattern) + return BuildTargetPatternParseResult(error = err_msg) # buildifier: disable=uninitialized - self is initialized def matches(label: [Label, TargetLabel]) -> bool: @@ -85,7 +113,7 @@ def parse_build_target_pattern(pattern: str) -> BuildTargetPattern: return True elif len(label.package) > path_pattern_length: # pattern cell//package/... matches label cell//package/subpackage:target - return label.package.startswith(self.path + _PATH_SYMBOL) + return label.package.startswith(self._path_with_path_symbol) else: return self.path == label.package else: @@ -99,10 +127,16 @@ def parse_build_target_pattern(pattern: str) -> BuildTargetPattern: elif self.kind == _BuildTargetPatternKind("package"): return "{}//{}:".format(normalized_cell, self.path) elif self.kind == _BuildTargetPatternKind("recursive"): - return "{}//{}...".format(normalized_cell, self.path + _PATH_SYMBOL if self.path else "") + return "{}//{}...".format(normalized_cell, self._path_with_path_symbol) else: fail("Unknown build target pattern kind.") - self = BuildTargetPattern(kind = kind, cell = cell, path = path, name = name, matches = matches, as_string = as_string) + self = BuildTargetPattern(kind = kind, cell = cell, path = path, name = name, matches = matches, as_string = as_string, _path_with_path_symbol = path + _PATH_SYMBOL if path else "") + + return BuildTargetPatternParseResult(build_target_pattern = self) - return self +def parse_build_target_pattern(pattern: str) -> BuildTargetPattern: + parse_res = try_parse_build_target_pattern(pattern) + if parse_res.error != None: + fail(parse_res.error) + return parse_res.build_target_pattern diff --git a/prelude/utils/clear_platform.bzl b/prelude/utils/clear_platform.bzl new file mode 100644 index 00000000000..6e2c9dda56c --- /dev/null +++ b/prelude/utils/clear_platform.bzl @@ -0,0 +1,24 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +# We don't want e.g. Apple simulator and Android emulator targets to be configured differently and handled as a different resource broker by buck2 core. +# By clearing the platform we make sure there is only a single configured target for each resource broker which manages resources of certain type. 
+def _transition_impl(platform: PlatformInfo, refs: struct) -> PlatformInfo: + # buildifier: disable=unused-variable + _ = (platform, refs) + return PlatformInfo( + label = "empty_platform", + configuration = ConfigurationInfo( + constraints = {}, + values = {}, + ), + ) + +clear_platform_transition = transition( + impl = _transition_impl, + refs = {}, +) diff --git a/prelude/utils/cmd_script.bzl b/prelude/utils/cmd_script.bzl index 3d8cd25a4bf..7a6c23ea890 100644 --- a/prelude/utils/cmd_script.bzl +++ b/prelude/utils/cmd_script.bzl @@ -52,4 +52,4 @@ def cmd_script( else: fail(os) - return cmd_args(wrapper).hidden(cmd) + return cmd_args(wrapper, hidden = cmd) diff --git a/prelude/utils/directory_fold.bzl b/prelude/utils/directory_fold.bzl new file mode 100644 index 00000000000..fbf2b31e487 --- /dev/null +++ b/prelude/utils/directory_fold.bzl @@ -0,0 +1,63 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +# Given a list of files, return a tree structure with the shape: +# +# type Tree = dict[component, None | Tree] +# +# Where None indicates a file, and a Tree indicates a directory. +def _build_tree(files): + tree = {} + for file in files: + map = tree + + # For every file, figure out every subdirectory and add it to the map if it's not there already + components = file.split("/") + for directory_chunk in components[:-1]: + map = map.setdefault(directory_chunk, {}) + map[components[-1]] = None + + return tree + +def _reduce_tree(path, tree, directory): + files = [] + dirs = [] + for k, v in tree.items(): + path2 = path + ("/" if path else "") + k + if v == None: + files.append(path2) + else: + dirs.append(_reduce_tree(path2, v, directory)) + return directory(path, dirs, files) + +# Given a list of files, perform a reduction on the tree structure. + # The `directory` argument is a function that takes a path, a list of subdirectory results, and a list of files. + # For example, given the paths `foo/bar.txt` and `foo/baz.txt` it would be called like this: + # + # directory("", [directory("foo", [], ["foo/bar.txt", "foo/baz.txt"])], []) +def directory_fold(files, directory): + return _reduce_tree("", _build_tree(files), directory) + +def _test_tree_functions(): + input = ["foo/bar/baz.txt", "foo/bar.txt", "foo.txt", "foo/bar/quux.txt", "foo/baz/quux.txt"] + output = { + "foo": { + "bar": {"baz.txt": None, "quux.txt": None}, + "bar.txt": None, + "baz": {"quux.txt": None}, + }, + "foo.txt": None, + } + result = _build_tree(input) + if result != output: + fail("_build_tree(), unexpected output. Wanted `{output}`, got `{result}`".format(output = output, result = result)) + + original = directory_fold(input, lambda _name, dirs, files: files + [x for xs in dirs for x in xs]) + if sorted(original) != sorted(input): + fail("_directory_fold(), unexpected output.
Wanted `{input}`, got `{original}`".format(input = input, original = original)) + +_test_tree_functions() diff --git a/prelude/utils/expect.bzl b/prelude/utils/expect.bzl index 7635ac6f5ef..889742519e3 100644 --- a/prelude/utils/expect.bzl +++ b/prelude/utils/expect.bzl @@ -20,6 +20,7 @@ load( "is_list", "is_number", "is_string", + "is_struct", ) def expect(condition: typing.Any, message: str = "condition not expected", *format_args): @@ -35,6 +36,15 @@ def expect(condition: typing.Any, message: str = "condition not expected", *form formatted_message = message.format(*format_args) fail(formatted_message) +def expect_equal(left: typing.Any, right: typing.Any, message: str | None = None, *format_args): + if left != right: + if message == None: + msg = "Expected values to be equal, but got '{}' and '{}' instead.".format(left, right) + fail(msg) + else: + formatted_message = message.format(*format_args) + fail(formatted_message) + def expect_non_none(val, msg: str = "unexpected none", *fmt_args, **fmt_kwargs): """ Require the given value not be `None`. @@ -43,7 +53,7 @@ def expect_non_none(val, msg: str = "unexpected none", *fmt_args, **fmt_kwargs): fail(msg.format(*fmt_args, **fmt_kwargs)) return val -def expect_type(name: str, check: typing.Callable, desc: str, val: typing.Any): +def expect_type(name: str, check: typing.Callable[[typing.Any], bool], desc: str, val: typing.Any): """Fails if check(val) is not truthy. name, desc are used for the error message. Usually you shouldn't need to directly use this, and prefer the expect_* family of functions @@ -111,6 +121,9 @@ def expect_contains_all(name, val, options): for index, val in enumerate(val): expect_contains("{name}[{index}]".format(name = name, index = index), val, options) +def expect_struct(name: str, val: struct): + expect_type(name, is_struct, "struct", val) + # You'll likely want to import this struct for convenience, instead of each method separately expects = struct( type = expect_type, @@ -126,4 +139,6 @@ expects = struct( collection = expect_collection, contains = expect_contains, contains_all = expect_contains_all, + equal = expect_equal, + struct = expect_struct, ) diff --git a/prelude/utils/graph_utils.bzl b/prelude/utils/graph_utils.bzl index 3fadf06f637..108edc562b4 100644 --- a/prelude/utils/graph_utils.bzl +++ b/prelude/utils/graph_utils.bzl @@ -9,7 +9,7 @@ load("@prelude//utils:expect.bzl", "expect") def pre_order_traversal( graph: dict[typing.Any, list[typing.Any]], - node_formatter: typing.Callable = str) -> list[typing.Any]: + node_formatter: typing.Callable[[typing.Any], str] = str) -> list[typing.Any]: """ Perform a pre-order (topologically sorted) traversal of `graph` and return the ordered nodes """ @@ -46,7 +46,7 @@ def pre_order_traversal( def post_order_traversal( graph: dict[typing.Any, list[typing.Any]], - node_formatter: typing.Callable = str) -> list[typing.Any]: + node_formatter: typing.Callable[[typing.Any], str] = str) -> list[typing.Any]: """ Performs a post-order traversal of `graph`.
""" @@ -82,7 +82,7 @@ def post_order_traversal( def fail_cycle( graph: dict[typing.Any, list[typing.Any]], - node_formatter: typing.Callable) -> typing.Never: + node_formatter: typing.Callable[[typing.Any], str]) -> typing.Never: cycle = find_cycle(graph) if cycle: fail( @@ -166,52 +166,75 @@ def pre_order_traversal_by( ordered = post_order_traversal_by(roots, get_nodes_to_traverse_func) return ordered[::-1] -def breadth_first_traversal( +def depth_first_traversal( graph_nodes: dict[typing.Any, list[typing.Any]], roots: list[typing.Any]) -> list[typing.Any]: """ - Like `breadth_first_traversal_by` but the nodes are stored in the graph. + Like `depth_first_traversal_by` but the nodes are stored in the graph. """ def lookup(x): return graph_nodes[x] - return breadth_first_traversal_by(graph_nodes, roots, lookup) + return depth_first_traversal_by(graph_nodes, roots, lookup) -def breadth_first_traversal_by( +# With following graph +# +# A +# / \ +# B C +# / \ / \ +# D E F G +# +# preorder-left-to-right starting from A will go to left leg first +# A-B-D-E-C-F-G +# +# preorder-right-to-left starting from A will go to right leg first +# A-C-G-F-B-E-D +# +GraphTraversal = enum( + "preorder-right-to-left", + "preorder-left-to-right", +) + +def depth_first_traversal_by( graph_nodes: [dict[typing.Any, typing.Any], None], roots: list[typing.Any], get_nodes_to_traverse_func: typing.Callable, - node_formatter: typing.Callable = str) -> list[typing.Any]: + traversal: GraphTraversal = GraphTraversal("preorder-right-to-left"), + node_formatter: typing.Callable[[typing.Any], str] = str) -> list[typing.Any]: """ - Performs a breadth first traversal of `graph_nodes`, beginning - with the `roots` and queuing the nodes returned by`get_nodes_to_traverse_func`. + Performs a depth first traversal of `graph_nodes`, beginning + with the `roots` and queuing the nodes returned by `get_nodes_to_traverse_func`. Returns a list of all visisted nodes. get_nodes_to_traverse_func(node: '_a') -> ['_a']: Starlark does not offer while loops, so this implementation - must make use of a for loop. We pop from the end of the queue - as a matter of performance. + must make use of a for loop. 
""" # Dictify for O(1) lookup visited = {k: None for k in roots} + stride = -1 if traversal == GraphTraversal("preorder-left-to-right") else 1 - queue = visited.keys() + stack = [] + for node in visited.keys()[::stride]: + stack.append(node) for _ in range(len(graph_nodes) if graph_nodes else 2000000000): - if not queue: + if not stack: break - node = queue.pop() + node = stack.pop() if graph_nodes and node not in graph_nodes: fail("Expected node {} in graph nodes".format(node_formatter(node))) nodes_to_visit = get_nodes_to_traverse_func(node) - for node in nodes_to_visit: - if node not in visited: - visited[node] = None - queue.append(node) + if nodes_to_visit: + for node in nodes_to_visit[::stride]: + if node not in visited: + visited[node] = None + stack.append(node) - expect(not queue, "Expected to be done with graph traversal queue.") + expect(not stack, "Expected to be done with graph traversal stack.") return visited.keys() diff --git a/prelude/utils/pick.bzl b/prelude/utils/pick.bzl index e64a7513f8c..d6de059ae47 100644 --- a/prelude/utils/pick.bzl +++ b/prelude/utils/pick.bzl @@ -18,7 +18,7 @@ def pick_raw(override, underlying): return override if override != None else underlying def pick_and_add(override, additional, underlying): - flags = cmd_args(pick(override, underlying)) + flags = [pick(override, underlying)] if additional: - flags.add(additional) - return flags + flags.append(additional) + return cmd_args(flags) diff --git a/prelude/utils/set.bzl b/prelude/utils/set.bzl index 14fb5e2c8b6..d242f45bb55 100644 --- a/prelude/utils/set.bzl +++ b/prelude/utils/set.bzl @@ -52,9 +52,9 @@ def set(initial_entries: list[typing.Any] = []) -> set_type: def set_add(v: typing.Any) -> bool: if self.contains(v): - return True + return False self._entries[v] = None - return False + return True def set_contains(v: typing.Any) -> bool: return v in self._entries @@ -66,7 +66,7 @@ def set(initial_entries: list[typing.Any] = []) -> set_type: return False def set_update(values: list[typing.Any]) -> list[typing.Any]: - return filter(None, [v for v in values if not self.add(v)]) + return filter(None, [v for v in values if self.add(v)]) def set_size() -> int: return len(self._entries) diff --git a/prelude/utils/source_listing.bzl b/prelude/utils/source_listing.bzl new file mode 100644 index 00000000000..5ac49d4a966 --- /dev/null +++ b/prelude/utils/source_listing.bzl @@ -0,0 +1,13 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load(":source_listing_impl.bzl?v2_only", "SourceListingInfoAlias", "source_listing_impl") + +SourceListingInfo = SourceListingInfoAlias + +def source_listing(exclude = None): + source_listing_impl(exclude = exclude or []) diff --git a/prelude/utils/source_listing_impl.bzl b/prelude/utils/source_listing_impl.bzl new file mode 100644 index 00000000000..fae29ffffdb --- /dev/null +++ b/prelude/utils/source_listing_impl.bzl @@ -0,0 +1,48 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
+ +SourceListingInfo = provider(fields = { + "sources": dict[str, Artifact], +}) + +SourceListingInfoAlias = SourceListingInfo + +def _impl(ctx): + sources = {} + for d in ctx.attrs.deps: + package = ctx.label.package + if package != "": + package += "/" + rel_loc = d.label.package.removeprefix(package) + sources.update({rel_loc + "/" + p: art for p, art in d[SourceListingInfo].sources.items()}) + + for s in ctx.attrs.srcs: + sources[s.short_path] = s + return [DefaultInfo(), SourceListingInfo(sources = sources)] + +# This rule acts sort of like a `filegroup`, except that 1) it returns all the +# source artifacts unchanged, and 2) it reports the location of all artifacts +# relative to the current package. We use this for gathering listings of the +# source files for bundled cells. +_source_listing = rule( + impl = _impl, + attrs = { + "deps": attrs.list(attrs.dep()), + "srcs": attrs.list(attrs.source()), + }, +) + +def source_listing_impl(exclude: list[str]): + package = package_name() + if package != "": + package += "/" + _source_listing( + name = "source_listing", + srcs = glob(["**/*", "**/.*"], exclude = exclude), + deps = ["//" + package + s + ":source_listing" for s in __internal__.sub_packages()], + visibility = ["PUBLIC"], + ) diff --git a/prelude/utils/type_defs.bzl b/prelude/utils/type_defs.bzl index 79ee96ba988..3ab5d21b5f9 100644 --- a/prelude/utils/type_defs.bzl +++ b/prelude/utils/type_defs.bzl @@ -88,7 +88,7 @@ def is_bool(arg): """Checks if provided instance is a boolean value. Args: - arg: An instance ot check. type: Any + arg: An instance to check. type: Any Returns: True for boolean values, False otherwise. rtype: bool @@ -101,7 +101,7 @@ def is_number(arg): """Checks if provided instance is a number value. Args: - arg: An instance ot check. type: Any + arg: An instance to check. type: Any Returns: True for number values, False otherwise. rtype: bool diff --git a/prelude/utils/utils.bzl b/prelude/utils/utils.bzl index cecc99d363d..658629fcae7 100644 --- a/prelude/utils/utils.bzl +++ b/prelude/utils/utils.bzl @@ -12,6 +12,12 @@ load("@prelude//utils:expect.bzl", "expect") def value_or(x: [None, typing.Any], default: typing.Any) -> typing.Any: return default if x == None else x +def values_or(*xs: typing.Any | None) -> typing.Any | None: + for x in xs: + if x != None: + return x + return None + # Flatten a list of lists into a list def flatten(xss: list[list[typing.Any]]) -> list[typing.Any]: return [x for xs in xss for x in xs] @@ -50,9 +56,6 @@ def from_named_set(srcs: [dict[str, Artifact | Dependency], list[Artifact | Depe def map_idx(key: typing.Any, vals: list[typing.Any]) -> list[typing.Any]: return [x[key] for x in vals] -def filter_idx(key: typing.Any, vals: list[typing.Any]) -> list[typing.Any]: - return [x for x in vals if key in x] - def filter_and_map_idx(key: typing.Any, vals: list[typing.Any]) -> list[typing.Any]: return [x[key] for x in vals if key in x] @@ -63,7 +66,7 @@ def idx(x: [typing.Any, None], key: typing.Any) -> [typing.Any, None]: def dedupe_by_value(vals: list[typing.Any]) -> list[typing.Any]: return {val: None for val in vals}.keys() -def map_val(func: typing.Callable, val: [typing.Any, None]) -> [typing.Any, None]: +def map_val(func: typing.Callable[[typing.Any], typing.Any], val: [typing.Any, None]) -> [typing.Any, None]: """ If `val` is `None`, return `None`, else apply `func` to `val` and return the result.
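
Stepping back to the new prelude/utils/argfile.bzl above: a hedged sketch of how a rule implementation might use `at_argfile` to pass a long argument list through a single @-file while keeping the inputs attached as hidden artifacts. The tool and the `mytool`/`srcs` attributes are hypothetical, not part of this diff:

load("@prelude//utils:argfile.bzl", "at_argfile")

def _mytool_impl(ctx: AnalysisContext) -> list[Provider]:
    out = ctx.actions.declare_output("out.txt")
    ctx.actions.run(
        cmd_args(
            ctx.attrs.mytool[RunInfo],
            "--output",
            out.as_output(),
            # Expands to a single "@path/to/mytool_args.txt" argument on the
            # command line; the srcs stay attached as hidden inputs so the
            # action still depends on them.
            at_argfile(
                actions = ctx.actions,
                name = "mytool_args.txt",
                args = ctx.attrs.srcs,
            ),
        ),
        category = "mytool",
    )
    return [DefaultInfo(default_output = out)]
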
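Similarly, a small sketch of the non-failing parse API introduced in build_target_pattern.bzl above; the patterns are illustrative:

load("@prelude//utils:build_target_pattern.bzl", "try_parse_build_target_pattern")

def _check_patterns(patterns: list[str]):
    for p in patterns:
        res = try_parse_build_target_pattern(p)
        if res.error != None:
            # Unlike parse_build_target_pattern, the caller can recover
            # instead of fail()-ing the whole evaluation.
            print(res.error)  # buildifier: disable=print
        else:
            # e.g. "cell//some/path/..." matches cell//some/path/sub:target
            _ = res.build_target_pattern

_check_patterns(["cell//some/path/...", "cell//pkg:", "cell//pkg:name", "not-a-pattern"])
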
diff --git a/prelude/validation/audit_results.bxl b/prelude/validation/audit_results.bxl new file mode 100644 index 00000000000..a86d5cad74b --- /dev/null +++ b/prelude/validation/audit_results.bxl @@ -0,0 +1,66 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +def _impl(ctx: bxl.Context): + if ctx.cli_args.transitive: + targets = ( + ctx.target_universe(ctx.cli_args.targets).universe_target_set() + ) + else: + targets = ctx.cli_args.targets + + info = {} + for target, analysis_result in ctx.analysis(targets).items(): + providers = analysis_result.providers() + validation_info = providers.get(ValidationInfo) + if not validation_info: + if not ctx.cli_args.trim: + info[target] = {} + continue + + spec_name_to_path = {} + for spec in validation_info.validations: + # Yes, I'm aware this is suboptimal. When running this script on + # large targets at Meta, there is no discernible regression in performance. + # + # Read the big ol' comment block below to understand why. + o = ctx.output.ensure(spec.validation_result) + spec_name_to_path[spec.name] = o.abs_path() + + info[target] = spec_name_to_path + + # We chose to print to stdout because we run into an issue with + # the ctx.bxl_actions().actions.write_json() API + # + # The goal is to output something into a file which looks like this: + # { + # "cell//some:target": { + # "spec_name": "path/to/materialized/output.json" + # } + # } + # + # Unfortunately, if we use the actions.write_json() API, it requires us to pass + # `with_inputs = True` so we can be sure that we materialize the paths to validation + # outputs with the JSON. + # + # Unfortunately, `ensured_artifact_group` has a limited API that doesn't allow us to + # print only a subset of the targets. While you can loop through them, there is no + # `.owner` API, so you'd have to guess based on the file path. + # + # As a result, we ensure the artifacts as we iterate so we can get the materialized absolute + # path and not run into the restriction that you are not allowed to freeze EnsuredArtifacts.
+ ctx.output.print_json(info) + +main = bxl_main( + impl = _impl, + cli_args = { + "targets": cli_args.target_expr(), + "transitive": cli_args.bool(False), + "trim": cli_args.bool( + default = True, + doc = "By default, targets with no validations will be stripped from the output.", + ), + }, +) diff --git a/prelude/windows/tools/BUCK.v2 b/prelude/windows/tools/BUCK.v2 index bda1136989c..a8fc0187647 100644 --- a/prelude/windows/tools/BUCK.v2 +++ b/prelude/windows/tools/BUCK.v2 @@ -1,3 +1,9 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + prelude = native prelude.export_file( diff --git a/prelude/worker_tool.bzl b/prelude/worker_tool.bzl index 7b1bad6ceef..1cac6e32534 100644 --- a/prelude/worker_tool.bzl +++ b/prelude/worker_tool.bzl @@ -28,9 +28,9 @@ def worker_tool(ctx: AnalysisContext) -> list[Provider]: expect(worker_tool_run_info != None, "Worker tool executable must have a RunInfo!") worker_tool_runner = ctx.attrs._worker_tool_runner[RunInfo] - worker_tool_cmd = cmd_args(worker_tool_runner) - worker_tool_cmd.add("--worker-tool") - worker_tool_cmd.add(worker_tool_run_info) + worker_tool_cmd = [worker_tool_runner] + worker_tool_cmd.append("--worker-tool") + worker_tool_cmd.append(worker_tool_run_info) worker_args = ctx.attrs.args if worker_args: @@ -40,8 +40,8 @@ def worker_tool(ctx: AnalysisContext) -> list[Provider]: allow_args = True, ) - worker_tool_cmd.add("--worker-args-file") - worker_tool_cmd.add(worker_args_file) + worker_tool_cmd.append("--worker-args-file") + worker_tool_cmd.append(worker_args_file) worker_env = ctx.attrs.env if worker_env: @@ -56,9 +56,10 @@ def worker_tool(ctx: AnalysisContext) -> list[Provider]: allow_args = True, ) - worker_tool_cmd.add("--worker-env-file") - worker_tool_cmd.add(env_args_file) + worker_tool_cmd.append("--worker-env-file") + worker_tool_cmd.append(env_args_file) + worker_tool_cmd = cmd_args(worker_tool_cmd) return [ DefaultInfo(), RunInfo( diff --git a/prelude/zip_file/tools/BUCK.v2 b/prelude/zip_file/tools/BUCK.v2 index 36cc2b8b9a6..560920dfcd8 100644 --- a/prelude/zip_file/tools/BUCK.v2 +++ b/prelude/zip_file/tools/BUCK.v2 @@ -1,3 +1,9 @@ +load("@prelude//utils:source_listing.bzl", "source_listing") + +oncall("build_infra") + +source_listing() + python_bootstrap_binary( name = "unzip", main = "unzip.py", diff --git a/prelude/zip_file/zip_file.bzl b/prelude/zip_file/zip_file.bzl index 79b89021585..3cf3c8017c3 100644 --- a/prelude/zip_file/zip_file.bzl +++ b/prelude/zip_file/zip_file.bzl @@ -8,7 +8,7 @@ load("@prelude//decls/toolchains_common.bzl", "toolchains_common") load(":zip_file_toolchain.bzl", "ZipFileToolchainInfo") -def zip_file_impl(ctx: AnalysisContext) -> list[Provider]: +def _zip_file_impl(ctx: AnalysisContext) -> list[Provider]: """ zip_file() rule implementation @@ -29,42 +29,42 @@ def zip_file_impl(ctx: AnalysisContext) -> list[Provider]: zip_srcs = ctx.attrs.zip_srcs srcs = ctx.attrs.srcs - create_zip_cmd = cmd_args([ + create_zip_cmd = [ create_zip_tool, "--output_path", output.as_output(), "--on_duplicate_entry", on_duplicate_entry if on_duplicate_entry else "overwrite", - ]) + ] if srcs: - srcs_file_cmd = cmd_args() - # add artifact and is_source flag pair - for src in srcs: - srcs_file_cmd.add(src) - srcs_file_cmd.add(src.short_path) - srcs_file_cmd.add(str(src.is_source)) + srcs_file_cmd = cmd_args( + [ + [src, src.short_path, str(src.is_source)] + for src in srcs + ], + ) entries_file = ctx.actions.write("entries", srcs_file_cmd) - 
create_zip_cmd.add("--entries_file") - create_zip_cmd.add(entries_file) - create_zip_cmd.hidden(srcs) + create_zip_cmd.append("--entries_file") + create_zip_cmd.append(entries_file) + create_zip_cmd.append(cmd_args(hidden = srcs)) if zip_srcs: - create_zip_cmd.add("--zip_sources") - create_zip_cmd.add(zip_srcs) + create_zip_cmd.append("--zip_sources") + create_zip_cmd.append(zip_srcs) if entries_to_exclude: - create_zip_cmd.add("--entries_to_exclude") - create_zip_cmd.add(entries_to_exclude) + create_zip_cmd.append("--entries_to_exclude") + create_zip_cmd.append(entries_to_exclude) - ctx.actions.run(create_zip_cmd, category = "zip") + ctx.actions.run(cmd_args(create_zip_cmd), category = "zip") return [DefaultInfo(default_output = output)] implemented_rules = { - "zip_file": zip_file_impl, + "zip_file": _zip_file_impl, } extra_attributes = { diff --git a/third-party/macros/rust_third_party.bzl b/third-party/macros/rust_third_party.bzl index f6a0bd3d227..d00ddce8227 100644 --- a/third-party/macros/rust_third_party.bzl +++ b/third-party/macros/rust_third_party.bzl @@ -1,3 +1,10 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + # @nolint def third_party_rust_prebuilt_cxx_library(name, **kwargs): diff --git a/vendir.lock.yml b/vendir.lock.yml index 85baf009218..4469e531859 100644 --- a/vendir.lock.yml +++ b/vendir.lock.yml @@ -2,26 +2,26 @@ apiVersion: vendir.k14s.io/v1alpha1 directories: - contents: - git: - commitTitle: 'buck2: glossary: snake casing nit...' - sha: 7f091ff001f6e1da2ce2d6fdefa940f07d7dc9d0 + commitTitle: Refactor derive(Freeze)... + sha: c2fa90e0c6bfc074362893ca389bbf41af2bd9d6 tags: - - "2024-03-15" + - "2024-10-15" path: . path: prelude - contents: - git: - commitTitle: 'buck2: glossary: snake casing nit...' - sha: 7f091ff001f6e1da2ce2d6fdefa940f07d7dc9d0 + commitTitle: Refactor derive(Freeze)... + sha: c2fa90e0c6bfc074362893ca389bbf41af2bd9d6 tags: - - "2024-03-15" + - "2024-10-15" path: . path: third-party/macros - contents: - git: - commitTitle: 'client: Fix "ctrl-c was pressed" bug...' - sha: f3c685667ef22d0130687003012b6960abec6b3b + commitTitle: Refactor derive(Freeze)... + sha: c2fa90e0c6bfc074362893ca389bbf41af2bd9d6 tags: - - 2023-10-15-425-gf3c685667 + - "2024-10-15" path: . path: third-party/rust/fixups/ring/include kind: LockConfig diff --git a/vendir.yml b/vendir.yml index 07aa872c3a4..53e6b2be820 100644 --- a/vendir.yml +++ b/vendir.yml @@ -1,30 +1,30 @@ apiVersion: vendir.k14s.io/v1alpha1 kind: Config directories: -- path: prelude - contents: - - path: . - git: - url: https://github.com/facebook/buck2.git - ref: "2024-03-15" - includePaths: - - prelude/**/* - newRootPath: prelude -- path: third-party/macros - contents: - - path: . - git: - url: https://github.com/facebook/buck2.git - ref: "2024-03-15" - includePaths: - - shim/third-party/macros/* - newRootPath: shim/third-party/macros -- path: third-party/rust/fixups/ring/include - contents: - - path: . - git: - url: https://github.com/facebook/buck2.git - ref: f3c685667ef22d0130687003012b6960abec6b3b - includePaths: - - shim/third-party/rust/fixups/ring/include/**/* - newRootPath: shim/third-party/rust/fixups/ring/include + - path: prelude + contents: + - path: . 
+ git: + url: https://github.com/facebook/buck2.git + ref: "2024-10-15" + includePaths: + - prelude/**/* + newRootPath: prelude + - path: third-party/macros + contents: + - path: . + git: + url: https://github.com/facebook/buck2.git + ref: "2024-10-15" + includePaths: + - shim/third-party/macros/* + newRootPath: shim/third-party/macros + - path: third-party/rust/fixups/ring/include + contents: + - path: . + git: + url: https://github.com/facebook/buck2.git + ref: "2024-10-15" + includePaths: + - shim/third-party/rust/fixups/ring/include/**/* + newRootPath: shim/third-party/rust/fixups/ring/include